#!/usr/bin/env python
###############################################################################
# File: models.py
# Name: Sal Fuentes Jr.
# Date: 09/06/2008
# Info: This module describes the data, its properties, and some basic business logic
################################################################################
import S3
import hashlib
import logging

from google.appengine.ext import db
from google.appengine.api import memcache

class AWSModel:
  """Thin Amazon S3 data-access layer with memcache-backed read caching.

  Read requests ("buckets", "keys", "key") are cached in memcache under a
  key prefixed with a hash of the AWS credentials, so different credential
  pairs never see each other's cached data. Writes and deletes invalidate
  the affected cache entries.
  """

  # Lifetime (seconds) of cached S3 responses in memcache.
  S3BASE_CACHE_EXPIRE_VAL = 60*5 #5 minutes

  def __init__(self, access_key, secret_key):
    """Open an authenticated S3 connection for the given credential pair.

    Args:
      access_key: AWS access key id.
      secret_key: AWS secret access key.
    """
    # Keep the credentials on the instance: get_url() needs them later to
    # build a QueryStringAuthGenerator.
    self.aws_access_key = access_key
    self.aws_secret_key = secret_key
    # Hash of the credential pair, used as a per-user memcache namespace.
    # .encode() keeps sha1 happy on both Python 2 and 3.
    # (not sure if this will consume too much cpu quota)
    aws_credentials = (access_key, secret_key)
    self.aws_creds_hash = hashlib.sha1(str(aws_credentials).encode('utf-8')).hexdigest()
    # BUG FIX: original passed undefined names aws_access_key/aws_secret_key;
    # use the constructor parameters.
    self.s3_conn = S3.AWSAuthConnection(access_key, secret_key) #maybe we can realize this object in datastore

  def _memcache_key(self, obj_request, bucket_name="", key_name=""):
    """Build the memcache key for an S3 request.

    Same layout the cache reads and the write paths invalidate.
    TODO: make the key more 'intuitive'/friendly.
    """
    return self.aws_creds_hash + bucket_name + obj_request + key_name

  def get_data(self, obj_request, bucket_name="", key_name=""):
    """Return S3 data for obj_request, consulting memcache first.

    Args:
      obj_request: one of "buckets", "keys", "key" (see get_data_from_amazon).
      bucket_name: bucket to operate on (unused for "buckets").
      key_name: key to fetch (only for "key").

    Returns:
      The cached or freshly-fetched S3 response data.
    """
    memcache_key = self._memcache_key(obj_request, bucket_name, key_name)
    data = memcache.get(memcache_key)
    if data is not None:
      logging.debug('fetched from memcache')
      return data
    data = self.get_data_from_amazon(obj_request, bucket_name, key_name)
    if not memcache.add(memcache_key, data, AWSModel.S3BASE_CACHE_EXPIRE_VAL):
      logging.error("Memcache set failed for aws s3 obj_request=%s." % obj_request)
    return data

  def get_data_from_amazon(self, obj_request, bucket_name="", key_name=""):
    """Fetch directly from S3, bypassing the cache.

    Args:
      obj_request: "buckets" (list all buckets), "keys" (list a bucket's
        contents) or "key" (fetch one object).

    Raises:
      ValueError: if obj_request is not one of the supported values.
        (BUG FIX: original crashed with a cryptic TypeError by calling None.)
    """
    cases = {
               "buckets": lambda: self.s3_conn.list_all_my_buckets().entries,
               "keys": lambda: self.s3_conn.list_bucket(bucket_name).entries,
               "key": lambda: self.s3_conn.get(bucket_name, key_name)
            }
    try:
      fetch = cases[obj_request]
    except KeyError:
      raise ValueError("Unknown obj_request: %r" % obj_request)
    return fetch()

  def get_url(self, bucket_name, key_name, expires=60):
    """Return a signed, expiring query-string URL for a key.

    Args:
      expires: URL lifetime in seconds (default 60).
    """
    # BUG FIX: original referenced undefined globals aws_access_key_id /
    # aws_secret_access_key; use the credentials stored by __init__.
    s3_gen = S3.QueryStringAuthGenerator(self.aws_access_key, self.aws_secret_key)
    s3_gen.set_expires_in(expires)
    return s3_gen.get(bucket_name, key_name)

  def delete_objs(self, bucket_name, key_names):
    """Delete the given keys from a bucket and invalidate cache entries.

    Returns:
      The S3 response for the last deletion, or None if key_names is empty.
    """
    response = None  # BUG FIX: was a NameError when key_names was empty
    for key_name in key_names:
      response = self.s3_conn.delete(bucket_name, key_name)
      # Drop any cached copy of this object so reads don't serve deleted data.
      memcache.delete(self._memcache_key("key", bucket_name, key_name))

    logging.debug("RESPONSE: %s" % response)
    # Flush the cached key listing for this bucket to enable refresh.
    memcache.delete(self._memcache_key("keys", bucket_name))
    return response

  def put_obj(self, bucket_name, key_name, obj):
    """Store obj under key_name in bucket_name and invalidate cache entries.

    Returns:
      The S3 response for the put.
    """
    response = self.s3_conn.put(bucket_name, key_name, obj)

    logging.debug("RESPONSE: %s" % response)
    # Flush both the cached key listing and any cached copy of this key,
    # otherwise get_data("key", ...) could serve the stale pre-put object.
    memcache.delete(self._memcache_key("keys", bucket_name))
    memcache.delete(self._memcache_key("key", bucket_name, key_name))
    return response
    
  
