#!/usr/bin/python
from __future__ import print_function

import argparse
import boto
import boto.s3.acl
from boto.s3.connection import S3Connection, OrdinaryCallingFormat
from boto.s3.key import Key
from boto.s3.bucket import Bucket
from os.path import abspath, exists, expanduser, getsize
from sys import exit
from string import lower, strip
from time import time
from pprint import pprint

class Logger:
    """Minimal stdout logger that can be globally silenced (-q/--quiet)."""

    def __init__(self, active = True):
        # Track activation state per instance instead of conditionally
        # shadowing a class attribute (the original only set the instance
        # attribute when active was truthy).
        self._active = bool(active)

    def write(self, message, Cr = True):
        """Print *message* when active; Cr=False suppresses the newline."""
        if self._active:
            print("{0}".format(message), end = "\n" if Cr else "")

    def activate(self):
        """Enable output."""
        self._active = True

    def deactivate(self):
        """Disable output (used by the -q/--quiet flag)."""
        self._active = False

    def active(self):
        """Return True when messages are being emitted."""
        return self._active

class S3:
    """Thin convenience wrapper around boto's S3 API.

    Holds a connection plus default bucket/object settings and exposes
    put/get/rm/cp/mv/ls/lsb and metadata helpers used by the CLI below.
    """

    class ExceptionLocalFileDoesNotExit(Exception):
        # NOTE: keeps the original (misspelled) class name so existing
        # "except S3.ExceptionLocalFileDoesNotExit" handlers still match.
        def __init__(self, value = None):
            self.value = value
        def __str__(self):
            return str(self.value)

    class ExceptionRemoteFileExists(Exception):
        """Remote object already exists with an identical MD5."""
        pass

    class ExceptionLocalFileExists(Exception):
        """Local file already exists with an identical MD5."""
        pass

    class ExceptionRemoteFileDoesNotExist(Exception):
        """Requested remote object was not found in the bucket."""
        pass

    class ExceptionPutByteXMitMismatch(Exception):
        """Upload finished but transmitted byte count != local file size."""
        def __init__(self, expected, xmit):
            self.expected = expected
            self.xmit = xmit
        def __str__(self):
            return "{} of {} bytes transmitted".format(self.xmit, self.expected)

    # Per-instance defaults; each operation accepts per-call overrides.
    accessKeyID = None
    accessKeySecret = None
    bucket = None
    remoteObject = None
    localObject = None
    reducedRedund = None
    crypt = None
    public = None

    _s3Connection = None
    _s3Key = None
    _s3Bucket = None

    def __init__(self,
        accessKeyID, accessKeySecret,
        bucket = None, remoteObject = None,
        localObject = None, reducedRedund = True,
        crypt = True,
        public = False
    ):
        """Open an S3 connection; bind to *bucket* when one is given."""
        self.accessKeyID = accessKeyID
        self.accessKeySecret = accessKeySecret
        self.bucket = bucket
        self.remoteObject = remoteObject
        self.localObject = localObject
        self.reducedRedund = reducedRedund
        self.crypt = crypt
        self.public = public

        # OrdinaryCallingFormat keeps bucket names containing dots usable.
        self._s3Connection = S3Connection(self.accessKeyID, self.accessKeySecret, calling_format=OrdinaryCallingFormat())

        if self.bucket is not None:
            self._s3Bucket = Bucket(self._s3Connection, self.bucket)
            self._s3Key = Key(self._s3Bucket)

    def _status(self, xmit, size):
        # boto transfer callback: rewrite the same console line with progress.
        print("{} / {}: {}%              \r".format(xmit, size, (float(xmit)/float(size))*100), end='')

    def _calcBucketSize(self, bucket):
        """Return the total size in bytes of every object in *bucket*."""
        self._s3Bucket = Bucket(self._s3Connection, bucket)
        totalSize = 0
        for obj in self._s3Bucket.list():
            totalSize += obj.size
        return totalSize

    def put(self,
        remoteObject = None,
        localObject = None,
        reducedRedund = None,
        crypt = None,
        public = None
    ):
        """Upload localObject to s3://bucket/remoteObject.

        Raises ExceptionRemoteFileExists when the remote ETag already
        matches the local MD5 (no re-upload needed), and
        ExceptionPutByteXMitMismatch when the transmitted byte count
        differs from the local file size.
        """
        if remoteObject is None:
            remoteObject = self.remoteObject
        if localObject is None:
            localObject = self.localObject
        if reducedRedund is None:
            reducedRedund = self.reducedRedund
        if crypt is None:
            crypt = self.crypt
        if public is None:
            public = self.public

        localFile = abspath(expanduser(localObject))
        if not exists(localFile):
            # fix: the original raised this without the required 'value'
            # argument, which produced a TypeError instead of the exception.
            raise self.ExceptionLocalFileDoesNotExit(localFile)
        if self._s3Key is None:
            self._s3Key = Key(Bucket(self._s3Connection, self.bucket), remoteObject)
        fileSize = getsize(localFile)
        # Binary mode so MD5/upload operate on the exact on-disk bytes.
        with open(localFile, "rb") as fp:
            md5 = self._s3Key.compute_md5(fp)
            try:
                remoteMD5 = self._s3Bucket.get_key(remoteObject).etag.strip('"')
                if md5[0] == remoteMD5:
                    raise self.ExceptionRemoteFileExists()
            except AttributeError:
                # get_key() returned None: object does not exist remotely yet.
                pass
            logger.write("Uploading local file '{0}' to s3://{1}/{2}".format(localObject, self.bucket, remoteObject))
            self._s3Key.name = remoteObject
            # Progress-callback count scaled to file size.  fix: the original
            # used a chain of plain 'if's, so every file under 10 GB ended up
            # with num_cb = 100000000 regardless of its size.
            if fileSize < 100000:
                num_cb = 1000
            elif fileSize < 1000000:
                num_cb = 10000
            elif fileSize < 10000000:
                num_cb = 100000
            elif fileSize < 100000000:
                num_cb = 1000000
            elif fileSize < 1000000000:
                num_cb = 10000000
            elif fileSize < 10000000000:
                num_cb = 100000000
            else:
                num_cb = 100

            bytesXmit = self._s3Key.set_contents_from_file(fp, md5 = md5, reduced_redundancy = reducedRedund, encrypt_key = crypt, cb=self._status, num_cb=num_cb)
            if public is True:
                self._s3Key.set_acl('public-read')
            if fileSize != bytesXmit:
                raise self.ExceptionPutByteXMitMismatch(fileSize, bytesXmit)

    def get(self,
        remoteObject = None,
        localObject = None
    ):
        """Download remoteObject into localObject.

        Raises ExceptionRemoteFileDoesNotExist when the object is missing,
        and ExceptionLocalFileExists when the local copy's MD5 already
        matches the remote ETag (no re-download needed).
        """
        if remoteObject is None:
            remoteObject = self.remoteObject
        if localObject is None:
            localObject = self.localObject

        localObject = abspath(expanduser(localObject))
        if self._s3Key is None:
            self._s3Key = Key(Bucket(self._s3Connection, self.bucket), remoteObject)
        try:
            remoteMD5 = self._s3Bucket.get_key(remoteObject).etag.strip('"')
        except AttributeError:
            raise self.ExceptionRemoteFileDoesNotExist()

        # fix: the original tested the undefined name 'localFile' here,
        # which raised NameError on every get().
        if exists(localObject):
            with open(localObject, "rb") as fp:
                md5 = self._s3Key.compute_md5(fp)
                if md5[0] == remoteMD5:
                    raise self.ExceptionLocalFileExists()
        # Binary mode: the downloaded object is raw bytes.
        with open(localObject, "wb") as fp:
            self._s3Key.name = remoteObject
            self._s3Key.get_contents_to_file(fp, cb=self._status, num_cb=1000)

    def getmeta(self, remoteObject, dataKey):
        """Print and return the value of metadata key *dataKey* on *remoteObject*.

        fix: the original ignored dataKey and always fetched "short-url"
        (and made a discarded get_xml_acl() call).
        """
        self._s3Key = Key(Bucket(self._s3Connection, self.bucket), remoteObject)
        # NOTE(review): Key.get_metadata reads the locally cached metadata
        # dict; presumably a prior HEAD/open populates it — verify with boto.
        value = self._s3Key.get_metadata(dataKey)
        print(dataKey, value)
        return value

    def setmeta(self, remoteObject, dataKey, dataValue):
        """Set metadata key/value on the local Key object for *remoteObject*.

        NOTE(review): set_metadata only updates the in-memory Key; pushing
        metadata to S3 normally requires a copy/upload — confirm intent.
        """
        self._s3Key = Key(Bucket(self._s3Connection, self.bucket))
        self._s3Key.key = remoteObject
        print("Setting meta data ({}) ({}) ({})".format(remoteObject, dataKey, dataValue))
        self._s3Key.set_metadata(dataKey, dataValue)
        print("Set")

    def rm(self, remoteObject = None):
        """Delete *remoteObject* (defaults to the instance's remoteObject)."""
        if remoteObject is None:
            remoteObject = self.remoteObject
        if self._s3Key is None:
            self._s3Key = Key(Bucket(self._s3Connection, self.bucket))
        self._s3Key.name = remoteObject
        self._s3Key.delete()

    def cp(self, sourceObject, targetObject, targetBucket = None, reducedRedund = None, crypt = None):
        """Server-side copy of sourceObject to targetBucket/targetObject."""
        if targetBucket is None:
            targetBucket = self.bucket
        if reducedRedund is None:
            reducedRedund = self.reducedRedund
        if crypt is None:
            crypt = self.crypt
        if self._s3Key is None:
            self._s3Key = Key(Bucket(self._s3Connection, self.bucket))
        self._s3Key.name = sourceObject
        self._s3Key.copy(targetBucket, targetObject, preserve_acl = True, reduced_redundancy = reducedRedund, encrypt_key = crypt)

    def mv(self, sourceObject, targetObject, targetBucket = None):
        """Move = copy then delete the source."""
        self.cp(sourceObject, targetObject, targetBucket)
        self.rm(sourceObject)

    def ls(self, prefix = None):
        """List objects in the bucket, optionally restricted to *prefix*.

        Returns {"totalSize": <bytes>, "objects": [{key, size, last_modified,
        storage_class, encrypted}, ...]}.
        """
        if self.bucket is not None:
            self._s3Bucket = Bucket(self._s3Connection, self.bucket)
        totalSize = 0
        objects = []
        for obj in self._s3Bucket.list(prefix):
            objects.append({
                "key": obj.key,
                "size": obj.size,
                "last_modified": obj.last_modified,
                "storage_class": obj.storage_class,
                "encrypted": obj.encrypted
            })
            totalSize += obj.size
        return {"totalSize": totalSize, "objects": objects}

    def lsb(self, bucket = None):
        """Return [{name, createDate, size}] for one bucket or all buckets.

        fix: with a named bucket the original iterated get_bucket(bucket)
        directly, which yields the bucket's *keys* (objects with no
        creation_date) rather than the bucket itself.
        """
        if bucket is not None:
            buckets = [self._s3Connection.get_bucket(bucket)]
        else:
            buckets = self._s3Connection.get_all_buckets()
        bucketList = []
        for b in buckets:
            bucketList.append({"name": b.name, "createDate": b.creation_date, "size": self._calcBucketSize(b.name)})
        return bucketList

def byteShift(number, byteshift):
    """Scale *number* (a byte count) down by 2**byteshift.

    byteshift == -1 selects human-readable auto-scaling via humanBytes().
    Returns a float for byteshift >= 0.
    """
    if byteshift == -1:
        return humanBytes(number)
    # fix: the original guarded against ZeroDivisionError, but 1 << byteshift
    # is never zero for byteshift >= 0, so that branch was dead code.
    return float(number) / float(1 << byteshift)

def humanBytes(number):
    """Scale *number* down by successive powers of 1024 (KiB..TiB).

    Returns the largest scaling still >= 1; the input is returned unchanged
    when it is below 1 KiB or not numeric.
    """
    previous = number
    for shift in (10, 20, 30, 40):
        try:
            result = float(number) / float(1 << shift)
        except (TypeError, ValueError):
            # fix: was a bare except; only non-numeric input can fail here,
            # in which case the value is handed back untouched.
            return number
        if result < 1:
            return previous
        previous = result
    return result
        
### End of S3 class and size-formatting helpers; command-line interface below
# Command-line definition.  Every value option uses nargs=1, so each parsed
# attribute is a one-element list (or None when absent) and downstream code
# indexes [0]; boolean flags use store_true.
parser = argparse.ArgumentParser() #(description = description, formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("-a", "--accesskeyid", nargs = 1, help="AWS Access Key ID", type=str)
parser.add_argument("-s", "--accesskeysecret", nargs = 1, help="AWS Access Key Secret", type=str)
parser.add_argument("-b", "--s3bucket", nargs = 1, help="S3 bucket", type=str)
# fix: help text was missing its closing parenthesis
parser.add_argument("-t", "--standard", help="Use Standard Storage (as opposed to reduced redundancy)", action="store_true")
parser.add_argument("-n", "--nocrypt", help="Do not use server side encryption", action="store_true")
parser.add_argument("-l", "--localfile", nargs = 1, help="Local file to send", type=str)
parser.add_argument("-o", "--objectname", nargs = 1, help="Name of object to store", type=str)
parser.add_argument("-p", "--prefix", nargs = 1, help="prefix limit for bucket listings", type=str)
parser.add_argument("-u", "--public", help="Set uploaded objects to public access", action="store_true")
parser.add_argument("-q", "--quiet", help="Suppress stdout messages", action="store_true")
# Size-display units; when several are given the largest unit wins (see below).
parser.add_argument("-B", "--bytes",     help="display sizes in bytes", action="store_true")
parser.add_argument("-K", "--kilobytes", help="display sizes in kilobytes", action="store_true")
parser.add_argument("-M", "--megabytes", help="display sizes in megabytes", action="store_true")
parser.add_argument("-G", "--gigabytes", help="display sizes in gigabytes", action="store_true")
parser.add_argument("-T", "--terabytes", help="display sizes in terabytes", action="store_true")
parser.add_argument("-H", "--humanbytes", help="display sizes in human readable format", action="store_true")
parser.add_argument("--key", nargs = 1, help="key, of a key/value pair", type=str)
parser.add_argument("--value", nargs = 1, help="value, of a key/value pair", type=str)

## Expect one or more commands
parser.add_argument("commands", nargs="+")
args = parser.parse_args()

# Shared logger used by S3 methods and the command handlers below.
logger = Logger()
if args.quiet:
    logger.deactivate()

# Bit shift used to scale byte counts for display (KB=10 ... TB=40);
# -1 selects human-readable auto-scaling.  Checked in ascending order, so
# the largest requested unit wins when several flags are combined.
byteshift = 0
if args.kilobytes:
    byteshift = 10
if args.megabytes:
    byteshift = 20
if args.gigabytes:
    byteshift = 30
if args.terabytes:
    byteshift = 40
if args.humanbytes:
    byteshift = -1


## Extract super and sub commands
commands = args.commands
try:
    command = commands[0].lower()
except IndexError:
    # fix: the original called logging.critical(), but the logging module
    # was never imported, so this path raised NameError instead of the
    # intended message.
    print("Missing command")
    raise SystemExit

# Each nargs=1 option is a one-element list when supplied and None when
# absent; indexing None raises TypeError, which selects the fallback.
try:
    accessKeyID = args.accesskeyid[0]
except TypeError:
    print("-a/--accesskeyid [S3 Access Key ID] required")
    exit(1)

try:
    accessKeySecret = args.accesskeysecret[0]
except TypeError:
    print("-s/--accesskeysecret [S3 Access Key Secret] required")
    exit(1)

try:
    # Bucket names are normalized to lowercase.
    s3Bucket = args.s3bucket[0].lower()
except TypeError:
    s3Bucket = None

try:
    localFile = args.localfile[0]
except TypeError:
    localFile = None

try:
    objectName = args.objectname[0]
except TypeError:
    objectName = None

# Reduced-redundancy storage is the default; -t/--standard opts out.
reducedRedund = True
if args.standard:
    reducedRedund = False

try:
    dataKey = args.key[0]
except TypeError:
    dataKey = None

try:
    dataValue = args.value[0]
except TypeError:
    dataValue = None

# Server-side encryption is the default; -n/--nocrypt opts out.
crypt = True
if args.nocrypt:
    crypt = False


# Command dispatch.  Each branch validates its required options, then runs
# the matching S3 operation.  fix throughout: "except X, e" is Python-2-only
# syntax (replaced with plain except clauses; the bound exception was never
# used), and the trailing "except: raise" clauses were no-ops and are gone.
if command == "get":
    # Download objectName from the bucket into localFile.
    if s3Bucket is None:
        print("-b/--bucket [bucket] required")
        exit(1)
    if localFile is None:
        print("-l/--localfile [local file] required")
        exit(1)
    if objectName is None:
        print("-o/--objectname [object name] required")
        exit(1)

    try:
        S3(accessKeyID, accessKeySecret, s3Bucket, reducedRedund = reducedRedund, crypt = crypt, remoteObject = objectName, localObject = localFile).get()
        logger.write("\nComplete")
    except S3.ExceptionLocalFileExists:
        logger.write("File already exists and contents match; no need to re-download")
    except S3.ExceptionRemoteFileDoesNotExist:
        logger.write("Remote file does not exist")

elif command == "put":
    # Upload localFile to the bucket as objectName.
    if s3Bucket is None:
        print("-b/--bucket [bucket] required")
        exit(1)
    if localFile is None:
        print("-l/--localfile [local file] required")
        exit(1)
    if objectName is None:
        print("-o/--objectname [object name] required")
        exit(1)

    try:
        S3(accessKeyID, accessKeySecret, s3Bucket, reducedRedund = reducedRedund, crypt = crypt, remoteObject = objectName, localObject = localFile, public = args.public).put()
        logger.write("\nComplete")
    except S3.ExceptionRemoteFileExists:
        logger.write("File already exists and contents match; no need to re-upload")

elif command == "rm":
    # Delete objectName from the bucket.
    if s3Bucket is None:
        print("-b/--bucket [bucket] required")
        exit(1)
    if objectName is None:
        print("-o/--objectname [object name] required")
        exit(1)

    S3(accessKeyID, accessKeySecret, s3Bucket, reducedRedund = reducedRedund, crypt = crypt, remoteObject = objectName).rm()
    logger.write("File Deleted")

elif command == "cp":
    # Server-side copy; -l/--localfile names the *destination* object.
    if s3Bucket is None:
        print("-b/--bucket [bucket] required")
        exit(1)
    if objectName is None:
        print("-o/--objectname [object name] required")
        exit(1)
    if localFile is None:
        print("The cp command uses -l/--localfile [local file] as the destination. -l/--localfile is required")
        exit(1)
    S3(accessKeyID, accessKeySecret, s3Bucket, reducedRedund = reducedRedund, crypt = crypt).cp(objectName, localFile)
    logger.write("File Copied")

elif command == "mv":
    # Server-side move; -l/--localfile names the *destination* object.
    if s3Bucket is None:
        print("-b/--bucket [bucket] required")
        exit(1)
    if objectName is None:
        print("-o/--objectname [object name] required")
        exit(1)
    if localFile is None:
        print("The mv command uses -l/--localfile [local file] as the destination. -l/--localfile is required")
        exit(1)
    S3(accessKeyID, accessKeySecret, s3Bucket, reducedRedund = reducedRedund, crypt = crypt).mv(objectName, localFile)
    logger.write("File Moved")

elif command == "getmeta":
    # Read one metadata key from objectName.
    if s3Bucket is None:
        print("-b/--bucket [bucket] required")
        exit(1)
    if objectName is None:
        print("-o/--objectname [object name] required")
        exit(1)
    if dataKey is None:
        print("--key [meta key name] required")
        exit(1)
    s3 = S3(accessKeyID, accessKeySecret, s3Bucket)
    value = s3.getmeta(objectName, dataKey)
    logger.write(value)

elif command == "setmeta":
    # Write one metadata key/value pair on objectName.
    if s3Bucket is None:
        print("-b/--bucket [bucket] required")
        exit(1)
    if objectName is None:
        print("-o/--objectname [object name] required")
        exit(1)
    if dataKey is None:
        print("--key [meta key name] required")
        exit(1)
    if dataValue is None:
        print("--value [meta key value] required")
        exit(1)
    s3 = S3(accessKeyID, accessKeySecret, s3Bucket)
    s3.setmeta(objectName, dataKey, dataValue)

elif command == "ls":
    # CSV listing of the bucket's objects, optionally limited by -p/--prefix.
    if s3Bucket is None:
        print("-b/--bucket [bucket] required")
        exit(1)
    print("key,size,last_modified,storage_class,encrypted")
    try:
        prefix = args.prefix[0]
    except TypeError:
        prefix = None
    ret = S3(accessKeyID, accessKeySecret, s3Bucket).ls(prefix)
    for entry in ret["objects"]:
        print("{},{:.3f},{},{},{}".format(entry["key"], byteShift(entry["size"], byteshift), entry["last_modified"], entry["storage_class"], entry["encrypted"]))
    totalSize = byteShift(ret["totalSize"], byteshift)
    print("Total Bucket Size: {:.3f}".format(totalSize))
    logger.write("Bucket Listed")

elif command == "lsb":
    # CSV listing of buckets (one bucket when -b is given, else all).
    totalSize = 0
    print("name,creation_date,size_in_bytes")
    for entry in S3(accessKeyID, accessKeySecret).lsb(s3Bucket):
        totalSize = totalSize + entry["size"]
        print("{},{},{:.3f}".format(entry["name"], entry["createDate"], byteShift(entry["size"], byteshift)))
    print("Total Size of Buckets: {:.3f}".format(byteShift(totalSize, byteshift)))
    logger.write("Bucket List Generated")

else:
    print("Unknown or no command")
    exit(1)

            
