#!/usr/bin/python
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This code is not supported by Google
#

"""
Sample script to upload data feed files to Google Cloud Storage.

Google Commerce Search (GCS) and Product Search users can use this script
to automate large feed file submission as an alternative to HTTP-fetch, FTP
and manual feed uploads through Merchant Center.

Feeds automated through Cloud Storage offer enhanced security, resumable
transfers, API services and the ability to upload files up to 10GB in size.
For more information, see:

http://support.google.com/merchants/bin/answer.py?answer=185963

SETUP:
  Requires python 2.6
  1. Enable Google Storage integration with Merchant Center
  
  2. Install GSUtil
      https://developers.google.com/storage/docs/gsutil_install
     Confirm access to the merchantcenter bucket using gsutil
      gsutil ls -b -l gs://merchantcenter<your_merchant_id>
  
  3. Install Google Storage and oauth2_plugin python library
      https://developers.google.com/storage/docs/gspythonlibrary
      set PYTHONPATH to include oauth2_plugin (assume gsutil unzipped to $HOME)
      export PYTHONPATH=${PYTHONPATH}:$HOME/gsutil/boto:$HOME/gsutil:$HOME/gsutil/oauth2_plugin/:
       
  4. View the <your_homedir>/.boto file created from step (2) and note the 
       gs_oauth2_refresh_token parameter
       
REFERENCE : 
    http://cloud.google.com/products/cloud-storage.html
    https://developers.google.com/storage/docs/gspythonlibrary
    http://docs.pythonboto.org/en/latest/index.html
    http://docs.pythonboto.org/en/latest/ref/gs.html
    https://developers.google.com/storage/docs/gsutil_install#authenticate

USAGE: 
    python gs_processor.py --bucket= --dest_key= --feed_file=  --refresh_token=
    
     bucket=  Your merchantcenter bucket (eg merchantcenter8437442)
     
     dest_key= Destination feed name
               NOTE this is the name of the feed.  In Merchant Center, goto
               "Data Feeds" and note the name of the feed you are trying to 
               upload.
               
     feed_file= Source feed file to upload
     
     refresh_token= oauth2 Refresh token for google cloudstore
     
 example:
 ./gs_processor.py --bucket=merchantcenter8437442 
                   --dest_key=feedname
                   --feed_file=/tmp/somefeed.xml
                   --refresh_token=1/W3-empZhaFqpig1M7Xa92SJZjLqUnAFaD-XCKAHUBTg
                                               
"""

import os
import sys
import getopt
import logging
import datetime
from ConfigParser import DuplicateSectionError

import boto.exception
from boto.gs.key import Key
from boto.gs.resumable_upload_handler import ResumableUploadHandler
from boto import storage_uri
from boto import Config
from oauth2_plugin import oauth2_plugin

class gs_processor(object):
  """Uploads a single feed file to a Google Cloud Storage bucket.

  All work happens in __init__: the supplied OAuth2 refresh token is
  injected into the boto configuration, the target bucket is opened,
  and the feed file is transferred with a resumable upload handler.
  Progress is logged to stdout and to a per-feed log file.
  """

  def __init__(self,bucket,dest_key,feed_file,refresh_token):
    """Connects to Cloud Storage and uploads feed_file as dest_key.

    Args:
      bucket: merchantcenter bucket name, without the gs:// prefix
          (e.g. merchantcenter8437442).
      dest_key: destination object (feed) name inside the bucket.
      feed_file: local path of the feed file to upload.
      refresh_token: OAuth2 refresh token for Google Cloud Storage.

    Calls sys.exit(-1) on authentication or connection failures.
    """
    self.GCS_BUCKET = bucket
    self.FEED_FILE = feed_file
    self.DEST_KEY = dest_key

    config = boto.config

    self.OAUTH2_REFRESH_TOKEN = refresh_token

    self.log("Start Transfer", logging.INFO)

    # override the .boto file configuration information
    try:
      config.add_section('Credentials')
      config.add_section('Boto')
    except DuplicateSectionError, e:
      # Sections already present in the user's .boto file; keep them.
      pass
      #self.log("Using existing .boto file", logging.INFO)

    # alternative to using oauth2 refresh token is to enable the
    # interop access keys on the google storage API console
    #config.set('Credentials', 'gs_access_key_id', '<YOUR ACCESS KEY_ID>' )
    #config.set('Credentials', 'gs_secret_access_key', '<YOUR ACCESS KEY>')

    config.set('Boto','https_validate_certificates','true')
    config.set('Credentials','gs_oauth2_refresh_token',self.OAUTH2_REFRESH_TOKEN)

    # Open the destination bucket.  Any authentication or connection
    # failure here is fatal: log it and exit with a non-zero status.
    try:
      uri = boto.storage_uri('gs://' + self.GCS_BUCKET)
      bucket =  uri.get_bucket()
    except boto.exception.BotoServerError, e:
      self.log("Error connecting " + str(e), logging.ERROR)
      sys.exit(-1)
    except boto.exception.TooManyAuthHandlerReadyToAuthenticate, e:
      self.log("TooManyAuthHandlerReadyToAuthenticate: " + str(e), logging.ERROR)
      sys.exit(-1)
    except boto.exception.NoAuthHandlerFound, e:
      self.log("NoAuthHandlerFound: " + str(e), logging.ERROR)
      sys.exit(-1)
    except oauth2_plugin.oauth2_client.AccessTokenRefreshError,e:
      self.log("AccessTokenRefreshError: " + str(e), logging.ERROR)
      sys.exit(-1)
    except oauth2_plugin.oauth2_client.AuthorizationCodeExchangeError:
      self.log ("AuthorizationCodeExchangeError", logging.ERROR)
      sys.exit(-1)

    try:
      self.log('Source file: ' +  self.FEED_FILE + ' ' +
               str(os.path.getsize(self.FEED_FILE)) + ' bytes',logging.INFO)

      # optionally validate the .csv feed file
      # for preprocessing and basic validation
      # http://code.google.com/p/gcs-admin-toolkit/source/browse/trunk/src/python/validate_feed.py
      #from validate_feed import validate_feed
      #validator = validate_feed(self.FEED_FILE,self.DEST_KEY,'csv')
      #if (validator.validate()):
      #  self.log('Source file validated',logging.INFO)
      #else:
      #  errors = validator.ALERT_MESSAGES
      #  self.log('Unable to validate feed file ' + str(errors),logging.ERROR)

      #if (len(validator.ALERT_MESSAGES)>0):
      #  self.log('Not proceeding with upload',logging.ERROR)
        #sys.exit(-1)

      # ResumableUploadHandler lets the transfer resume after transient
      # network failures instead of restarting from byte zero.
      res_upload_handler = ResumableUploadHandler()
      cb = self.progress  # per-chunk progress callback, see progress()
      key = bucket.new_key(self.DEST_KEY)

      key.set_contents_from_filename(self.FEED_FILE, cb=cb, replace=True, res_upload_handler=res_upload_handler)
      self.log('Transfer complete: ' + ' ' + uri.scheme + '://' +
               uri.bucket_name + '/' + key.name + ' ' + str(key.size) +  ' bytes',
               logging.INFO)

      # optionally perform postprocessing validation using the Data Feeds API
      # http://code.google.com/p/gcs-admin-toolkit/source/browse/trunk/src/auth/gcs_auth.py
      # http://code.google.com/p/gcs-admin-toolkit/wiki/GCSAuthentication
      #from gcs_auth import gcs_auth
      #g = gcs_auth('<YOUR OAUTH LOGIN>','<YOUR OAUTH PRIVATE KEY')
      #validator.verifyTransfer('<YOUR MERCHANT ID>',g.read_token(),[validator.COMMERCE_SEARCH_USECASE])

      # to recall feed history
      #print validator.feed_history('feedname')

      # to recall item history
      #print validator.offer_history('100014')

      # if ALERT_MESSAGES >0, then there are errors. do something here
      # to alert you of feed errors (i.,e send email, page, etc)gc
      #print validator.ALERT_MESSAGES

    except boto.exception.GSResponseError, e:
      # Upload failed server-side; log but fall through so the
      # End Transfer marker is still written.
      self.log(str("Upload Error " + str(e.error_message)), logging.ERROR)
    except IOError, e:
      # Source file unreadable (missing, permissions, ...).
      self.log("Feed_File IOError " + str(e), logging.ERROR)
    self.log('End Transfer', logging.INFO)

  def progress(self, total_bytes_transferred, total_size):
    """Upload progress callback: logs bytes transferred so far."""
    self.log(str('Transferring: ' + str(total_bytes_transferred) + '/' + str(total_size) + ' bytes'),logging.INFO)

  def log(self,msg, loglevel):
    """Prints msg and appends it, timestamped, to a per-feed log file.

    Args:
      msg: message text to record.
      loglevel: logging.DEBUG, logging.ERROR or anything else (INFO).
    """
    LOG_FILENAME = 'feed_' + self.GCS_BUCKET + '_' + self.DEST_KEY + '.log'
    # basicConfig only takes effect the first time it is called; later
    # calls are no-ops, so the log file name is fixed on first use.
    logging.basicConfig(filename=LOG_FILENAME,level=logging.INFO)
    m = ('[%s] %s') % (datetime.datetime.now(), msg)
    print m
    if (loglevel == logging.DEBUG):
      logging.debug(m)
    elif (loglevel == logging.ERROR):
      logging.error(m)
    else:
      logging.info(m)
      
if __name__ == '__main__':
  feed_file = None
  bucket = None
  dest_key = None
  refresh_token = None

  try:
    opts, args = getopt.getopt(sys.argv[1:], None, ["bucket=", "dest_key=", "feed_file=",
                                                    "refresh_token="])
  except getopt.GetoptError:
    print 'Please specify --bucket= --dest_key= --feed_file=  --refresh_token='
    sys.exit(1)

  for opt, arg in opts:
    if opt == "--feed_file":
      feed_file = arg
    elif opt == "--bucket":
      bucket = arg
    elif opt == "--dest_key":
      dest_key = arg      
    elif opt == "--refresh_token":
      refresh_token = arg            

  if (feed_file is not None and bucket is not None and dest_key is not None and 
      refresh_token is not None):
    gs_processor(bucket, dest_key, feed_file, refresh_token)
  else:
    print 'Please specify  --bucket=  --dest_key= --feed_file= --refresh_token=' 