__author__ = 'anorberg'

#TODO: insert GPL2 and ISB copyright

#code stolen from the ACLs & Rights dev guide

import httplib2
import sys
import apiclient.discovery
import apiclient.errors
import apiclient.http
import pprint
from oauth2client import client as o2client
from oauth2client import tools as o2tools
from oauth2client.file import Storage

# OAuth2 installed-application flow for the BigQuery read/write scope.
# NOTE(review): the client secret is embedded in source; Google treats
# installed-app secrets as non-confidential, but confirm this is acceptable.
_OAUTH_FLOW = o2client.OAuth2WebServerFlow(
  client_id = "577991774362.apps.googleusercontent.com",
  client_secret = "AlrASRiOz7DZAfoS8s3oEE9s",
  scope="https://www.googleapis.com/auth/bigquery",
  user_agent="simplebq2/0.1"
)

# Where credentials are cached between runs, relative to the working directory.
_DEFAULT_CREDENTIALS_PATH = "oauth.crd" #in a later version, support actually choosing a credentials path

# When True, missing/expired credentials trigger the interactive OAuth2 flow
# instead of raising AccessTokenCredentialsError (see doSomethingOnBQ).
_AUTO_REQUEST = True

# Service name/version passed to apiclient.discovery.build.
_BQ_SERVICE_NAME = "bigquery"
_BQ_VERSION_STRING = "v2beta1"
# NOTE(review): str.split() takes a literal separator, not a regex, so this
# value means the two characters backslash-s rather than "all whitespace" —
# verify every use site.
_URI_DELIMITER = "\\s" #all whitespace
# Hostname of the Google Storage web endpoint, used to recognize http(s)
# URLs that can be rewritten as gs:// URIs.
_GS_ROOT_URI = "commondatastorage.googleapis.com"

class CLIError(Exception):
  """Raised when a command's arguments do not match its required arity."""

def authorize(cred_path = _DEFAULT_CREDENTIALS_PATH):
  """Run the interactive OAuth2 flow and cache the result at cred_path.

  cred_path: file path where the obtained credentials are stored.
  Returns the credentials object produced by the flow.
  """
  return o2tools.run(_OAUTH_FLOW, Storage(cred_path))

def doSomethingOnBQ(cred_path, fxn, fxn_args):
  """Build an authorized BigQuery connection and run one operation with it.

  cred_path: path to the stored OAuth2 credentials file.
  fxn: operation callable taking (service, args) and returning a response.
  fxn_args: argument list forwarded to fxn.
  Pretty-prints fxn's return value to standard output.
  Raises AccessTokenCredentialsError when credentials are missing or expired
  and _AUTO_REQUEST is disabled.
  """
  credentials = Storage(cred_path).get()

  needs_auth = credentials is None or credentials.invalid
  if needs_auth and not _AUTO_REQUEST:
    raise o2client.AccessTokenCredentialsError("Missing or expired OAuth2 credentials.")
  if needs_auth:
    # Fall back to the interactive flow, refreshing the stored credentials.
    credentials = authorize(cred_path)

  transport = credentials.authorize(httplib2.Http())
  service_conn = apiclient.discovery.build(_BQ_SERVICE_NAME, _BQ_VERSION_STRING, http=transport)

  pprint.pprint(fxn(service_conn, fxn_args))


def fix_gs_uri(candidate_uri):
  if candidate_uri.startswith("gs://"):
    return candidate_uri
  elif candidate_uri.startswith("http://"+_GS_ROOT_URI):
    #7: len("http://") + trailing slash on GS URI - zero-index
    return "gs://" + candidate_uri[7+len(_GS_ROOT_URI):]
  elif candidate_uri.startswith("https://"+_GS_ROOT_URI):
    return "gs://" + candidate_uri[8+len(_GS_ROOT_URI):]
  elif candidate_uri.startswith("https://"):
    #this and the next are sketchy, but we might as well try
    return "gs://" + candidate_uri[6:]
  elif candidate_uri.startswith("http://"):
    return "gs://" + candidate_uri[7:]
  else:
    return "gs://" + candidate_uri #most likely just a flat path

def bq_append_csv_op(service, args):
  """Append one or more CSV files already in Google Storage to an existing
  BigQuery table by submitting a load job.

  service: authorized BigQuery service connection.
  args: [projectId, datasetId, tableId, uri-list, extra-uris...]. The fourth
        element may be a (possibly quoted) whitespace-separated list of URIs;
        any further elements are treated as additional URIs.
  Returns the server's job-insert response.
  Raises CLIError when fewer than four arguments are given, and an HTTP 404
  when the destination table is missing (intentional: never creates tables).
  """
  if len(args) < 4:
    raise CLIError("Append_CSV takes exactly four parameters: table-project-ID, table-dataset-ID, table-ID, GoogleStorage-URI-to-CSV-to-append")

  projectId = args[0]
  datasetId = args[1]
  tableId = args[2]

  # Bug fix: the old code split on _URI_DELIMITER ("\\s"), but str.split
  # takes a literal separator, not a regex, so a quoted URI list was never
  # actually split on whitespace. split() with no argument splits on any
  # whitespace run and ignores leading/trailing whitespace.
  sourceCsvUris = args[3].split()

  # Failure to quote additional URIs? Okay, cool -- take the extra args too.
  sourceCsvUris.extend(args[4:])

  # Normalize everything to gs:// URIs.
  sourceCsvUrisFixed = [fix_gs_uri(uri) for uri in sourceCsvUris]

  # Learn the schema, and verify the destination table exists.
  # That'll raise a 404 if the destination table is missing. Intentional.
  tableResource = service.tables().get(projectId=projectId, datasetId=datasetId, tableId=tableId).execute()
  schema = tableResource["schema"]

  # Paste that schema in to perform the append. CREATE_NEVER keeps this op
  # strictly append-to-existing; WRITE_APPEND adds rows instead of replacing.
  jobRequestData = {
    "projectId" : projectId,
    "configuration":{
      "load":{
        "sourceUris":sourceCsvUrisFixed,
        "schema" : schema,
        "destinationTable" : {
          "projectId" : projectId,
          "datasetId" : datasetId,
          "tableId" : tableId
        },
        "createDisposition" : "CREATE_NEVER",
        "writeDisposition" : "WRITE_APPEND"
      }
    }
  }
  return service.jobs().insert(projectId=projectId, body=jobRequestData).execute()

def bq_job_status_op(service, args):
  """Fetch the status of a previously submitted BigQuery job.

  service: authorized BigQuery service connection.
  args: [projectId, jobId].
  Returns the job resource returned by the server.
  Raises CLIError unless exactly two arguments are supplied.
  """
  if len(args) != 2:
    raise CLIError("Status takes exactly two parameters: projectID, jobID")
  proj, job = args
  return service.jobs().get(projectId = proj, jobId = job).execute()

def print_usage():
  """Print the CLI help text to standard output (no arguments, no return)."""
  print r"""
  simplebq.py - a stripped-down client for authenticating and operating on Google BigQuery
  Usage: <python> simplebq.py <command> <arguments for command>

  Command        | Description
                 |     Command Argument |  Description
  ----------------------------------------------------------------------------------------
  authorize      | Connects to Google's authentication server. Follow the instructions to
                 | authenticate yourself using OAuth2. The key will be saved in the current
                 | directory as oauth.crd, and will be loaded for subsequent operations.
                 |
  append_csv     | Appends 1 or more CSV files already in Google Storage to a BigQuery
                 | table that already exists. Not able to create new tables.
                 | Four positional arguments are required:
                 |     <projectId> <datasetId> <tableId> <source-CSV-list>
                 |     projectId        | ID of the project for the destination table.
                 |     datasetId        | ID of the dataset for the destination table.
                 |     tableId          | ID of the destination table.
                 |     source-csv-list  | A whitespace-delimited list of URIs. Each one
                 |                      | must refer to a CSV file available in Google
                 |                      | Storage to append to the specified table.
                 |                      | This list may be, but is not required to be,
                 |                      | quoted. (It is parsed as a list, but additional
                 |                      | arguments to the command are taken as items
                 |                      | that should be on the end of the list.)
                 |
  status         | Gets the status of an already-submitted job, from its jobId.
                 | This will generally be the jobId field given in a previous response
                 | from the server during a previous simplebq (or related) call.
                 | Two positional arguments are required:
                 | <projectId> <jobId>
                 |     projectId        | ID of the project the job was submitted against.
                 |     jobId            | jobId from the run job. Specifically the
                 |                      | unqualified jobId; the fully-qualified "id" field
                 |                      | is not correct to use here.

  Successful commands pretty-print the response from the server to standard output.
  Unsuccessful commands will raise exceptions with hopefully-informative error messages.
  HTTPExceptions representing a 404 usually mean your project, dataset, or table ID is wrong.
  Authentication is interactive, so its entire process happens on standard I/O.
  """


# Dispatch table mapping lower-cased CLI command names to handler functions.
# Each handler takes (service, args) and returns the server response for
# pretty-printing; 'appendcsv' is an alias for 'append_csv'.
_OPERATION_DICT ={
  'append_csv' : bq_append_csv_op,
  'appendcsv'  : bq_append_csv_op,
  'status'     : bq_job_status_op
}

def main(args):
  cmd = args[1].lower()

  if cmd == 'authorize':
    return authorize()
  elif cmd=="-?" or cmd == 'help' :
    print_usage()
    return None
  else:
    fxn = None
    try:
      fxn = _OPERATION_DICT[cmd]
    except KeyError:
      print "Unknown operation:", cmd
      print_usage()
      raise
    return doSomethingOnBQ(_DEFAULT_CREDENTIALS_PATH, fxn, args[2:])


if __name__ == "__main__":
  # Run the CLI. On bad options, show the usage text and re-raise the
  # original exception so the process still exits with a failure status.
  try:
    main(sys.argv)
  except CLIError as error:
    # NOTE(review): error.message is a Python 2-ism (gone in Python 3).
    print "Incorrect options:", error.message
    print_usage()
    raise
