#!/usr/bin/python
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This code is not supported by Google
#

"""
Sample script to validate a GCS feed file.

The script reads a csv or xml GCS/MC feed file, parses it to extract the 
unique ID fields for each item and performs the following:

  * checks to see if the ID has been defined multiple times in the current feed file
  * checks to see if the ID has been defined in multiple feed files at different times
  * checks to see if the ID has been submitted within the last 4 hours
  * checks the total number of items defined in previous feed compared to the current
  * saves the item ID, feed name, history and timestamp for each item as a file database row
    
Usage
 Requires python 2.6+
 
 Merchants with multiple feeds should reuse the shared database file (FEED_HISTORY_DB).
 For example, if you have two feeds (GCS+PS and GCS only), use
 
 validate_feed('gcs_ps.xml', 'gcspsfeed', 'xml').validate()
 validate_feed('gcs.xml', 'gcsfeed', 'xml').validate()

 FEED_HISTORY_DB will contain offers from both gcs_ps.xml and gcs.xml files above. 
 Do not run this script on multiple feed files at the same time (rerun it against subsequent feeds
 only after the previous run completes).
 
 validator = validate_feed(feed_file,feed_name,feed_type)
     feed_file: Feed file to validate
     feed_name: Name of the feed exactly as shown in Merchant Center --> Data Feeds
     feed_type: csv or xml (for csv files, specify delimiter and quote scheme in code below)
     
 validator.validate():  validates the current feed file to detect duplicates and previously defined items in a feed file.
 
 validator.verifyTransfer(<merchant_id>,<oauth token>,[<list of usecases to check for>])
 
 where the merchant center admin account gets <oauth token> with 
 SCOPE=https://www.googleapis.com/auth/structuredcontent
 see:  http://code.google.com/p/gcs-admin-toolkit/source/browse/trunk/src/auth/gcs_auth.py
       https://developers.google.com/console/help/#service_accounts
 
 from gcs_auth import gcs_auth
 g = gcs_auth('<oauth2 login>@developer.gserviceaccount.com','<oauth2 private key file>')
 validator.verifyTransfer(<merchant_id>,g.read_token(),[validator.COMMERCE_SEARCH_USECASE])
 
 
 sample usage could be
   validator = validate_feed(feed_file,feed_name,feed_type)
   validator.validate()
   ..
   *perform transfer with any mechanism FTP or Google CloudStore* 
     >> eg http://code.google.com/p/gcs-admin-toolkit/source/browse/trunk/src/python/gs_processor.py     

   * once transfer is complete, verify the transfer outcome:
   g = gcs_auth('<oauth2 login>@developer.gserviceaccount.com','<oauth2 private key file>')
   validator.verifyTransfer(<merchant_id>,g.read_token(),[validator.COMMERCE_SEARCH_USECASE])   
 
 To query for an item or feed history:
 
 item_history = validator.offer_history(offer_id)
 feed_history = validator.feed_history(feed_name)
 
Reference:
 https://addons.mozilla.org/en-US/firefox/addon/sqlite-manager/
 http://code.google.com/p/gcs-admin-toolkit/wiki/GCSAuthentication 
"""

import sys
import getopt
import logging
import datetime
import sqlite3
import time
import csv
import xml.sax
from datetime import datetime
from dateutil import tz
import urllib,urllib2
from urllib2 import URLError, HTTPError
import libxml2

class validate_feed(object):
  """Validates a GCS/MC feed file against a local sqlite history database.

  Parses the feed (csv or xml) to collect the unique item IDs.  validate()
  checks for duplicate IDs inside the feed, IDs already seen in other
  feeds, resubmission within the refeed window, and a large drop in item
  count versus the previous run.  verifyTransfer() polls the Merchant
  Center data feed API after an upload and records per-feed and per-item
  history rows.
  """

  # shared sqlite database file; reuse it across all feeds of a merchant
  FEED_HISTORY_DB = 'feedstatus.db'
  # class-level default only; __init__ rebinds a per-instance dict so
  # alerts are not shared between validator instances
  ALERT_MESSAGES = {}

  # valid usecases to check for in the feed status response
  COMMERCE_SEARCH_USECASE = 'CommerceSearch'
  PRODUCT_SEARCH_USECASE = 'ProductSearch'

  def __init__(self, feed_file, feed_name, feed_type):
    """Opens/creates the history database and parses the feed file.

    Args:
      feed_file: path of the feed file to validate.
      feed_name: feed name exactly as shown in Merchant Center.
      feed_type: 'csv' or 'xml' (csv delimiter/quoting set in _parse_csv).

    Raises:
      sqlite3.Error: the history database could not be opened/prepared.
      ValueError: a csv feed has no 'id' header column.
      Exception: the xml feed could not be parsed.
    """
    self.FEED_FILE = feed_file
    self.FEED_NAME = feed_name
    self.FEED_TYPE = feed_type
    self.id = []
    # per-instance alert dict (the class attribute was shared by all
    # instances, leaking alerts between validators)
    self.ALERT_MESSAGES = {}
    self.TRANSFER_TIME = datetime.utcnow().replace(tzinfo=tz.tzutc())

    # BUG FIX: the original caught 'sqlite3.connect' (a function, not an
    # exception class, so nothing was ever caught) and returned False from
    # __init__, which raises TypeError; log and re-raise instead.
    try:
      self.conn = sqlite3.connect(self.FEED_HISTORY_DB, check_same_thread=False)
      self.cursor = self.conn.cursor()
    except sqlite3.Error:
      self.log("ERROR Connecting to DB", logging.ERROR)
      raise

    # create the initial table structure
    self._create_schema()

    # parse csv/xml feed file and extract the provided ID numbers per item
    self.log(str("start Feed Validation %s, %s" % (self.FEED_FILE, self.FEED_NAME)), logging.INFO)
    if self.FEED_TYPE.lower() == 'csv':
      self._parse_csv()
    if self.FEED_TYPE.lower() == 'xml':
      self._parse_xml()

  def _create_schema(self):
    """Creates the history tables/index, tolerating pre-existing ones."""
    for stmt in (
        'CREATE TABLE mc_item (id text primary key unique, history varchar, last_update text, feed_id text)',
        'CREATE TABLE mc_feed (feed_id text primary key unique, last_update text, last_items_inserted integer, history varchar)',
        'create index idx_id ON mc_item(id);'):
      try:
        self.cursor.execute(stmt)
      except sqlite3.OperationalError as e:
        # only "... already exists" errors are expected and ignorable
        if 'already exists' not in str(e):
          self.log(str(e), logging.ERROR)
          raise

  def _parse_csv(self):
    """Reads a tab-delimited, fully quoted csv feed into self.id."""
    # Specify the delimiter and quoting scheme here if your feed differs.
    with open(self.FEED_FILE, 'r') as source:
      reader = csv.reader(source, delimiter='\t', quoting=csv.QUOTE_ALL)
      headers = next(reader)
      try:
        # case-insensitive lookup of the unique item id column
        colnum = [h.lower() for h in headers].index('id')
      except ValueError:
        # the original fell off the header loop when no 'id' column was
        # present and later raised an opaque IndexError per data row;
        # fail fast with a clear message instead
        self.log("ERROR: no 'id' column in csv feed", logging.ERROR)
        raise ValueError("no 'id' column in csv feed: " + self.FEED_FILE)
      for fields in reader:
        self.id.append(fields[colnum])

  def _parse_xml(self):
    """SAX-parses an xml feed, collecting item/entry id text into self.id."""
    try:
      source = open(self.FEED_FILE)
      try:
        sparser = ABContentHandler()
        xml.sax.parse(source, sparser)
        self.id = sparser.id_array
      finally:
        # the original leaked the file handle
        source.close()
    except Exception as e:
      self.log(str(e), logging.ERROR)
      self.ALERT_MESSAGES[str(e)] = True
      raise

  # validates the feed file and checks to see if items in the feed file are
  # unique, and whether any item is defined in other feeds.
  def validate(self):
    """Checks the parsed feed against the history database.

    Returns:
      True when no alerts were raised, False otherwise.
    """
    now = datetime.utcnow().replace(tzinfo=tz.tzutc())
    self.TRANSFER_TIME = now

    # duplicate detection inside the current feed file; a count dict
    # replaces the original O(n^2) list.count() per unique id
    counts = {}
    for item in self.id:
      counts[item] = counts.get(item, 0) + 1
    for item, n in counts.items():
      if n > 1:
        self.log(str("WARNING: Offer exists multiple times in current feed file: [" + self.FEED_FILE + "]: " + item), logging.WARNING)
        self.ALERT_MESSAGES["Offer exists multiple times in current feed file"] = True

    # for each item id, extract its history and compare to see if the ID
    # exists in some other feed and whether it was recently resubmitted
    try:
      feed_last_items_inserted = None
      for row in self.cursor.execute('SELECT feed_id, last_update, last_items_inserted, history FROM mc_feed WHERE feed_id=?', (self.FEED_NAME,)):
        feed_last_items_inserted = row[2]

      # check to see if the current feed has less than 70% of the previous
      # feed's items (truthiness also guards a zero count, which divided
      # by zero in the original)
      if feed_last_items_inserted:
        if len(self.id) / float(feed_last_items_inserted) < 0.7:
          self.log("WARNING: Number of items in current feed is less than 70% of the previous feed", logging.WARNING)
          self.ALERT_MESSAGES["Number of items in current feed is less than 70% of the previous feed"] = True

      # minimum gap between submissions of the same item; the original
      # left a 10-second debug value in place of the documented 4 hours
      REFEED_DELAY = 4 * 60 * 60
      for i in self.id:
        for row in self.cursor.execute('SELECT id, history, last_update, feed_id FROM mc_item WHERE id=?', (str(i),)):
          # SECURITY NOTE: history rows are stored with str() and recalled
          # with eval(); acceptable only because this db is produced
          # locally by this script -- never point it at untrusted data.
          history = eval(row[1])
          # check to see if the item was submitted within the refeed
          # window (the original required len(history) > 1, skipping
          # items with exactly one prior submission)
          if history:
            utc1 = datetime.strptime(history[-1].get('updated'), "%Y-%m-%dT%H:%M:%S.%fZ")
            feed_time = utc1.replace(tzinfo=tz.tzutc())
            delta = now - feed_time
            # full elapsed seconds; .seconds alone ignores whole days
            if delta.days * 86400 + delta.seconds < REFEED_DELAY:
              self.log("WARNING: Item resubmitted within 4 hours of last feed: " + str(i), logging.WARNING)

          # check to see if the item is recorded under another feed name
          for logentry in history:
            fname = logentry.get('feed_name')
            if fname.lower() != self.FEED_NAME.lower():
              # BUG FIX: the original logged logentry[1], a KeyError on a dict
              self.log(str("WARNING: Offer exists in another feed: [" + fname + "]: " + i), logging.WARNING)
              self.ALERT_MESSAGES["Offers exists in another feed"] = True

    except sqlite3.InterfaceError as e:
      self.log(str("Unable to insert " + str(e)), logging.ERROR)

    self.log(str('Number of offers processed: ' + str(len(self.id))), logging.INFO)
    self.log("end Feed Validation", logging.INFO)
    self.conn.commit()
    self.cursor.close()

    return len(self.ALERT_MESSAGES) == 0

  # uses the merchant center 'data feed api' to see when the item gets
  # processed and what the valid/enlisted feed usecases are.
  def verifyTransfer(self, cid, token, usecases_to_monitor):
    """Polls the MC data feed API until this feed reports 'processed'.

    Polls at most 60 times with a 60 second pause (about one hour).  When
    processed, checks insert/processed ratios and monitored destinations,
    then stores the run in the history database.

    Args:
      cid: Merchant Center account id, as a string.
      token: OAuth2 access token with the structuredcontent scope
        (oauth2 tokens are only valid for 1 hour; renew expired tokens).
      usecases_to_monitor: destinations (e.g. COMMERCE_SEARCH_USECASE)
        that must be enabled on the feed.

    Returns:
      True when the feed processed with no alerts, False otherwise.
    """
    is_processed = False
    self.cursor = self.conn.cursor()
    # number of 60 second polls performed so far (max 60 => ~1 hour)
    delay = 0
    self.log('start feed verification ', logging.INFO)
    while not is_processed and delay < 60:

      # query using the data feed api
      url = 'https://content.googleapis.com/content/v1/' + cid + '/datafeeds/products?alt=atom&max-results=25'

      # for oauth2 see
      # https://developers.google.com/accounts/docs/OAuth2#serviceaccount
      # set SCOPE=https://www.googleapis.com/auth/structuredcontent
      url = url + '&access_token=' + token
      req = urllib2.Request(url)
      req.add_header('GData-Version', '1')
      req.add_header('Content-type', 'application/atom+xml')

      try:
        content = urllib2.urlopen(req).read()

        # parse the xml and create namespaces we'll need later
        doc = libxml2.parseDoc(content)
        ctxt = doc.xpathNewContext()
        ctxt.xpathRegisterNs('openSearch', "http://a9.com/-/spec/opensearchrss/1.0/")
        ctxt.xpathRegisterNs('s', "http://www.google.com/shopping/api/schemas/2010")
        ctxt.xpathRegisterNs('atom', "http://www.w3.org/2005/Atom")
        ctxt.xpathRegisterNs('app', "http://www.w3.org/2007/app")
        ctxt.xpathRegisterNs('gd', "http://schemas.google.com/g/2005")
        ctxt.xpathRegisterNs('sc', "http://schemas.google.com/structuredcontent/2009")
        ctxt.xpathRegisterNs('scp', "http://schemas.google.com/structuredcontent/2009/products")
        ctxt.xpathRegisterNs('batch', "http://schemas.google.com/gdata/batch")
      except HTTPError as e:
        self.log('HTTPError code: ' + str(e.code), logging.ERROR)
        self.log(e.read(), logging.ERROR)
        sys.exit(1)
      except URLError as e:
        # BUG FIX: URLError has no read(); only the reason is loggable
        self.log('URLError: ' + str(e.reason), logging.ERROR)
        sys.exit(1)
      except Exception as e:
        self.log('Error: ' + str(e), logging.ERROR)
        sys.exit(1)

      # iterate through each feed entry, looking for ours by title
      nl_feeds = ctxt.xpathEval('/atom:feed/atom:entry')
      for n_feed in nl_feeds:
        # reset the xpath context to the entry
        ctxt.setContextNode(n_feed)
        title = ctxt.xpathEval('atom:title')[0].content
        updated = ctxt.xpathEval('atom:updated')[0].content
        if title.lower() == self.FEED_NAME.lower():
          n_status = ctxt.xpathEval('sc:processing_status')[0]
          status = n_status.prop('status')
          self.log(str("processing status: " + str(n_status)), logging.INFO)
          if status == 'processed':
            self.log('feed processing completed: ' + self.FEED_NAME, logging.INFO)

            for n_error in ctxt.xpathEval('sc:processing_status/sc:feed_errors/sc:feed_error'):
              self.log("MC postprocessing feed errors: " + n_error.prop('message'), logging.DEBUG)

            items_inserted = ctxt.xpathEval('sc:processing_status/sc:items_inserted')[0].content
            items_processed = ctxt.xpathEval('sc:processing_status/sc:items_processed')[0].content

            # guard: items_processed == 0 divided by zero in the original
            if float(items_processed) > 0:
              if float(items_inserted) / float(items_processed) < 0.8:
                self.log(str('WARNING: items_inserted is less than 80% of the number of items processed in feed'), logging.WARNING)
                self.ALERT_MESSAGES['WARNING: items_inserted is less than 80% of the number of items processed in feed'] = True

              if float(len(self.id)) / float(items_processed) < 0.8:
                self.log(str('WARNING: items_inserted is less than 80% of the number of items processed in the last feed file'), logging.WARNING)
                self.ALERT_MESSAGES['WARNING: items_inserted is less than 80% of the number of items processed in the last feed file'] = True

            for n_destinaton in ctxt.xpathEval('sc:feed_destination'):
              if (n_destinaton.prop('dest') in usecases_to_monitor and n_destinaton.prop('enabled') == 'false'):
                self.log(str('WARNING: feed not enlisted for ' + n_destinaton.prop('dest')), logging.WARNING)
                self.ALERT_MESSAGES[str('WARNING: feed not enlisted for ' + n_destinaton.prop('dest'))] = True

            is_processed = True
            self._store_history(updated, items_processed, items_inserted)

      # release libxml2 structures (the original leaked them every poll)
      ctxt.xpathFreeContext()
      doc.freeDoc()

      if not is_processed:
        # the feed is still being processed in MC (or was not found in the
        # response).  Wait 60 seconds and retry, up to 60 times.
        # BUG FIX: the original only slept when the feed title matched, so
        # an unknown feed name caused a tight infinite request loop.
        self.log('feed stil processing...', logging.INFO)
        time.sleep(60)
        delay = delay + 1

    self.log('end feed verification ', logging.INFO)

    return len(self.ALERT_MESSAGES) == 0

  def _store_history(self, updated, items_processed, items_inserted):
    """Appends this run to the feed/item history tables (bounded length)."""
    feed_history = []
    for row in self.cursor.execute('SELECT feed_id, last_update, last_items_inserted, history FROM mc_feed WHERE feed_id=?', (self.FEED_NAME,)):
      # SECURITY NOTE: eval of locally written str() data; see validate().
      feed_history = eval(row[3])

    # save last 40 uploads as history
    if len(feed_history) > 40:
      del feed_history[0:len(feed_history) - 40]

    # create a dict with all the feed history
    dict_entry = {'updated': updated, 'feed_name': self.FEED_NAME, 'items_in_feed': len(self.id), 'items_processed': items_processed, 'items_inserted': items_inserted}
    feed_history.append(dict_entry)
    # parameterized statement (the original interpolated values into the SQL)
    self.cursor.execute('INSERT OR REPLACE INTO mc_feed (feed_id, last_update, last_items_inserted, history) VALUES (?,?,?,?)',
                        (self.FEED_NAME, updated, str(len(self.id)), str(feed_history)))
    self.conn.commit()

    # now recall each item from the sqlite database to save its history
    for i in self.id:
      item_history = []
      for row in self.cursor.execute('SELECT id, history, last_update, feed_id FROM mc_item WHERE id=?', (str(i),)):
        item_history = eval(row[1])
      item_history.append({'updated': updated, 'feed_name': self.FEED_NAME})
      # save last 20 uploads as history (BUG FIX: the original trimmed
      # using len(feed_history), the wrong list)
      if len(item_history) > 20:
        del item_history[0:len(item_history) - 20]
      self.cursor.execute('INSERT OR REPLACE INTO mc_item (id, history, last_update, feed_id) VALUES (?,?,?,?)',
                          (str(i), str(item_history), updated, self.FEED_NAME))
      self.conn.commit()
    self.cursor.close()

  # recall the history of the offer
  def offer_history(self, offer_id):
    """Returns the stored history list for offer_id.

    Returns None when the database cannot be opened, the query fails, or
    no row exists for offer_id.
    """
    try:
      conn = sqlite3.connect(self.FEED_HISTORY_DB, check_same_thread=False)
      c = conn.cursor()
    except sqlite3.Error:
      # BUG FIX: the original caught sqlite3.connect (not an exception class)
      self.log("ERROR Connecting to DB", logging.ERROR)
      return None

    try:
      for row in c.execute('SELECT id, history, last_update FROM mc_item WHERE id=?', (offer_id,)):
        # SECURITY NOTE: eval of locally written str() data; see validate().
        return eval(row[1])
      return None
    except sqlite3.OperationalError as e:
      self.log(str(e), logging.ERROR)
      return None
    finally:
      conn.close()

  # recall the history of the feed
  def feed_history(self, feed_id):
    """Returns the stored history list for feed_id.

    Returns None when the database cannot be opened, the query fails, or
    no row exists for feed_id.
    """
    try:
      conn = sqlite3.connect(self.FEED_HISTORY_DB, check_same_thread=False)
      c = conn.cursor()
    except sqlite3.Error:
      # BUG FIX: the original caught sqlite3.connect (not an exception class)
      self.log("ERROR Connecting to DB", logging.ERROR)
      return None

    try:
      for row in c.execute('SELECT feed_id, history, last_update FROM mc_feed WHERE feed_id=?', (feed_id,)):
        # SECURITY NOTE: eval of locally written str() data; see validate().
        return eval(row[1])
      return None
    except sqlite3.OperationalError as e:
      self.log(str(e), logging.ERROR)
      return None
    finally:
      conn.close()

  def log(self, msg, loglevel):
    """Prints msg and appends it to the per-feed log file."""
    LOG_FILENAME = 'feedstatus_' + self.FEED_NAME + '.log'
    # basicConfig is a no-op after the first configuring call; kept so the
    # class works standalone without external logging setup
    logging.basicConfig(filename=LOG_FILENAME, level=logging.INFO)
    m = ('[%s] %s') % (datetime.now(), msg)
    print(m)
    if loglevel == logging.DEBUG:
      logging.debug(m)
    elif loglevel == logging.ERROR:
      logging.error(m)
    else:
      logging.info(m)
      
class ABContentHandler(xml.sax.ContentHandler):
  """SAX handler collecting the text of <id>/<*:id> elements that appear
  inside <item> or <entry> elements of a feed file (case-insensitive)."""

  def __init__(self):
    xml.sax.ContentHandler.__init__(self)
    # BUG FIX: these were class attributes, so id_array (a mutable list)
    # was shared by every handler instance; make them per-instance state.
    self.is_id = False           # currently inside an id element
    self.item_or_entry = False   # currently inside an item/entry element
    self.id_array = []           # collected id strings, in document order

  def startElement(self, name, attrs):
    lowered = name.lower()
    if lowered == 'item' or lowered == 'entry':
      self.item_or_entry = True
    if self.item_or_entry and (lowered.endswith(":id") or lowered == 'id'):
      self.is_id = True

  def endElement(self, name):
    lowered = name.lower()
    if lowered.endswith(":id") or lowered == 'id':
      self.is_id = False
      self.item_or_entry = False

  def characters(self, content):
    # NOTE: SAX may deliver one element's text in several chunks; each
    # chunk is appended separately (preserves the original behavior).
    if self.is_id:
      self.id_array.append(content)
                     
if __name__ == '__main__':
  feed_file = None
  feed_name = None
  feed_type = None

  # No short options are supported, so pass '' as shortopts.
  # BUG FIX: the original passed None, which makes getopt raise
  # AttributeError (not GetoptError) when any short option like -h is given.
  try:
    opts, args = getopt.getopt(sys.argv[1:], '', ["feed_file=", "feed_name=", "feed_type="])
  except getopt.GetoptError:
    print('Please specify --feed_file= --feed_name= --feed_type= ')
    sys.exit(1)

  for opt, arg in opts:
    if opt == "--feed_file":
      feed_file = arg
    elif opt == "--feed_name":
      feed_name = arg
    elif opt == "--feed_type":
      feed_type = arg

  # all three options are mandatory
  if feed_file is not None and feed_name is not None and feed_type is not None:
    v = validate_feed(feed_file, feed_name, feed_type)
    v.validate()
  else:
    print('Please specify  --feed_file=  --feed_name= --feed_type= ')