"""
Copyright 2006 Hanzoarchives limited

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
"""
import S3
import os
import urllib
import httplib
import time

# Unit suffixes for human-readable sizes; index = number of /1024 divisions.
units = [ "bytes" , "Kb" , "Mb" , "Gb" , "Tb" , "Pb" ]
# Base chunk size in bytes; the streaming I/O loops read/write BUF_SIZE * 10.
BUF_SIZE = 20480

def _nicesize( v , uidx ):
    k = float(v)/1024
    if k > 1:
        return _nicesize( k , uidx + 1 )
    else:
        return ( v, uidx )       

def nicesize( v ):
    """Format a byte count as a human-readable string, e.g. "2.00 Kb"."""
    scaled, idx = _nicesize( v , 0 )
    return "%#.2f %s" % ( scaled , units[idx] )
        
def getKeys(  ):
    """Read AWS credentials from the process environment.

    Returns (access_key, secret_key) taken from AWS_ACCESS_KEY_ID and
    AWS_ACCESS_KEY_SECRET, or (None, None) when either is unset.
    """
    # `in` replaces the deprecated dict.has_key() and works on Python 2 and 3.
    if "AWS_ACCESS_KEY_ID" in os.environ and "AWS_ACCESS_KEY_SECRET" in os.environ:
        return ( os.environ[ "AWS_ACCESS_KEY_ID"] , os.environ[ "AWS_ACCESS_KEY_SECRET"] )
    return ( None , None )
    
def getconnection( accesskey , secretkey ):
    """Build an authenticated S3 connection object from a key pair."""
    conn = S3.AWSAuthConnection( accesskey , secretkey )
    return conn
    
def listAllBuckets( conn ):
    """Return a list of the names of every bucket owned by this account."""
    return [ bucket.name for bucket in conn.list_all_my_buckets().entries ]
    
def listBucket( conn , bname ):
    """List every key in bucket `bname`, following truncated result pages.

    Returns a dict mapping key name -> (last_modified, etag, size,
    storage_class, owner), or None if any page request does not return
    HTTP 200 (the status and reason are printed in that case).
    """
    ret = {}
    options = {}
    while True:
        res = conn.list_bucket( bname , options )
        if res.http_response.status != 200:
            print  res.http_response.status , res.http_response.reason
            return None
        for item in res.entries:
            ret[ str(item.key )] = ( item.last_modified ,
                                                item.etag,
                                                item.size,
                                                item.storage_class,
                                                item.owner  )
            # Remember the last key seen so the next page can start after it.
            lastkey = item.key
        if not res.is_truncated:
            break
        else:
            # NOTE(review): if a truncated page ever came back with zero
            # entries, `lastkey` would be unbound here — presumably the S3
            # library never returns such a page; TODO confirm.
            options = { "marker":lastkey }
    return ret
    
def listBucketItr( conn , bname , options={} ):
    while True:
        ret = {}
        res = conn.list_bucket( bname , options )
        if res.http_response.status != 200:
            print  res.http_response.status , res.http_response.reason
            return 
        for item in res.entries:
            ret[ str(item.key )] = ( item.last_modified ,
                                                item.etag,
                                                item.size,
                                                item.storage_class,
                                                item.owner  )
            lastkey = item.key
        yield ret            
        if not res.is_truncated:
            break
        else:
            options = { "marker":lastkey }
    return      
    
def makeBucket( conn ,  bname ):
    """Create bucket `bname`.

    Returns (True, None) on HTTP 200, else (False, reason).

    NOTE: this function was previously defined twice back to back with
    identical behavior (the second definition silently shadowed the
    first); the duplicate has been removed.
    """
    ret = conn.create_bucket(bname)
    if ret.http_response.status != 200:
        return ( False ,  ret.http_response.reason )
    return ( True , None )
    
def removeBucket( conn ,  bname ):
    """Delete bucket `bname`; returns (True, None) or (False, reason)."""
    resp = conn.delete_bucket( bname ).http_response
    # A successful bucket delete answers with 204 No Content.
    if resp.status == 204:
        return ( True , None )
    return ( False , resp.reason )
    
def putInBucket( conn , bname , keyname , content ,  meta , headers ):
    """Upload `content` (with metadata dict `meta`) as bucket/keyname.

    Returns (True, None) on HTTP 200, else (False, reason).
    """
    obj = S3.S3Object( content , meta )
    result = conn.put( bname , keyname , obj , headers )
    if result.http_response.status == 200:
        return ( True , None )
    return ( False , result.http_response.reason )
    
def getFromBucket( conn , bucket , keyname ):
    """Fetch an object; returns (ok, reason_or_None, body_or_None)."""
    resp = conn.get( bucket , keyname )
    if resp.http_response.status == 200:
        return ( True , None , resp.body )
    return ( False , resp.http_response.reason , None )
    

def deleteFromBucket( conn , bucket , keyname ):
    """Delete `keyname` from `bucket`.

    Returns (True, None, body) on success or (False, reason, None).

    Accepts both 200 and 204 as success: the S3 REST API answers a
    successful DELETE Object with 204 No Content (just as removeBucket
    expects for delete_bucket), so treating only 200 as success
    mislabelled successful deletes as failures.
    """
    ret = conn.delete(bucket , keyname)
    if ret.http_response.status not in ( 200 , 204 ):
        return ( False ,  ret.http_response.reason  , None )
    return ( True , None , ret.body )
    

def getacl(conn, bucket, keyname):
    """Fetch the ACL document for a key, or for the bucket itself when
    `keyname` is empty/None.  Returns (ok, reason_or_None, acl_or_None)."""
    result = conn.get_acl(bucket, keyname or '')
    if result.http_response.status == 200:
        return ( True , None , result.body )
    return ( False , result.http_response.reason , None )

def putacl(conn, bucket, keyname, acl):
    """Apply ACL document `acl` to a key, or to the bucket itself when
    `keyname` is empty/None.  Returns (ok, reason_or_None, errbody_or_None)."""
    result = conn.put_acl(bucket, keyname or '', acl)
    if result.http_response.status != 200:
        return ( False , result.http_response.reason , result.body )
    return ( True , None , None )

def getHTTPConnection( ):
    """Open a raw HTTP(S) connection to the default S3 host.

    Currently hard-wired to secure mode; the port is looked up from
    S3.PORTS_BY_SECURITY.  The insecure branch is kept for completeness.
    """
    secure = True
    endpoint = "%s:%d" % ( S3.DEFAULT_HOST , S3.PORTS_BY_SECURITY[secure] )
    if secure:
        return httplib.HTTPSConnection( endpoint )
    return httplib.HTTPConnection( endpoint )
    
def readFromBucket( accesskey , secretkey  , bucket , keyname , outf , range = None , method='GET'):
    """Stream an object (GET) or its headers (HEAD) over a raw HTTP request.

    Signs the request itself and copies the response body to `outf` in
    BUF_SIZE*10 chunks, so large objects are never held fully in memory.

    range  -- optional (start, end) pair sent as a byte-range header.
              NOTE(review): the name shadows the `range` builtin, but is
              kept since renaming would break keyword-argument callers.
    method -- 'GET' (default) writes the body to outf; 'HEAD' writes the
              response headers to outf instead.

    Returns (True, None) on any 2xx status, else (False, reason).
    """
    conn = getHTTPConnection( )
    path = '%s/%s' % (bucket, urllib.quote_plus(keyname))
    headers = {}
    if range is not None:
        headers['Range'] =  "bytes=%u-%u" % ( int(range[0]) , int(range[1]) )
    data = ''
    # Sign after all headers are set so the signature covers them.
    add_aws_auth_header( accesskey , secretkey , headers , method , path )
    conn.request(method, "/%s" % path, data, headers )
    resp = conn.getresponse()
    if int( resp.status ) >= 200 and int( resp.status ) < 300:
        if method == 'HEAD':
             # Print out headers
             # HTTPResponse.getheaders() shows up in python 2.4.
             for k, v in resp.getheaders():
                 outf.write("%s: %s\n" % (k, v.strip('"')))
             return (True, None)
        else:
             # Assume method is 'GET' and there's response data.
             while True:
                 dat = resp.read( BUF_SIZE * 10 )
                 if dat:
                     outf.write( dat )
                 else:
                     break
        return ( True , None )
    else:
        return ( False , resp.reason )
        
def streamToBucket( accesskey , secretkey  , bucket , keyname , filesize , inf ,headers, meta):
    """Upload `filesize` bytes read from file-like `inf` to bucket/keyname.

    Streams the body with httplib putrequest/send in BUF_SIZE*10 chunks
    instead of building the whole object in memory, so arbitrarily large
    files can be uploaded.  `headers` may be None; `meta`, when truthy,
    is merged into the headers via S3.merge_meta.

    Returns (True, None) on any 2xx status, else (False, reason).
    """
    conn = getHTTPConnection( )
    method = 'PUT'
    path = '%s/%s' % (bucket, urllib.quote_plus(keyname))
    if not headers:
         headers = {}
    # Content-Length must be set explicitly because the body is streamed.
    headers['Content-Length'] = str( filesize )
    if meta:
        headers = S3.merge_meta(headers, meta)
    # Sign after all headers are in place so the signature covers them.
    add_aws_auth_header( accesskey , secretkey , headers , method , path )
    conn.putrequest(method, "/%s" % path )
    for k in headers:
        conn.putheader( k , headers[k] )
    conn.endheaders()
    while True:
        dat = inf.read( BUF_SIZE * 10 )
        if dat:
            conn.send( dat )
        else:
            break
    resp = conn.getresponse()
    if int( resp.status ) >= 200 and int( resp.status ) < 300:
        return ( True , None )
    else:
        return ( False , resp.reason )


def add_aws_auth_header( accesskey , secretkey , headers, method, path):
    """Add Date (when absent) and AWS Authorization headers in place.

    Builds the canonical string for (method, path, headers) and signs it
    with `secretkey` via the S3 helper routines.  Mutates `headers`;
    returns nothing.
    """
    # `in` replaces the deprecated dict.has_key() and works on Python 2 and 3.
    if 'Date' not in headers:
        headers['Date'] = time.strftime("%a, %d %b %Y %X GMT", time.gmtime())
    c_string = S3.canonical_string(method, path, headers)
    headers['Authorization'] = "AWS %s:%s" % (accesskey,
                                              S3.encode(secretkey, c_string))
    
    
    
    
    
 
    
    
