from __future__ import print_function

import boto
from boto.s3.key import Key
from datetime import datetime
import os
import sys
import subprocess

# AWS configuration.
# SECURITY: these credentials were previously hard-coded (and are now in
# version-control history) — rotate them. Environment variables take
# precedence; the old literals remain only as a backward-compatible fallback.
AWS_ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY', 'AKIAITUVXL5LCLFSURQQ')
AWS_SECRET_KEY = os.environ.get('AWS_SECRET_KEY', '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')
AWS_BUCKET = 'keras-ndsb'


def store_to_s3(file_name, target_name, delete=False):
    """Upload a local file to the configured S3 bucket.

    Parameters
    ----------
    file_name : str
        Path of the local file to upload.
    target_name : str
        Key under which the object is stored in ``AWS_BUCKET``.
    delete : bool
        When True, remove the local file after a successful upload.
    """
    start = datetime.now()
    print('Storing ' + file_name + ' to S3...')
    conn = boto.connect_s3(AWS_ACCESS_KEY, AWS_SECRET_KEY)
    try:
        bucket = conn.get_bucket(AWS_BUCKET)
        key = Key(bucket)
        key.key = target_name
        key.set_contents_from_filename(file_name)
    finally:
        # BUG FIX: the connection was never closed, leaking it on every call
        # (and especially on upload errors).
        conn.close()
    # BUG FIX: the old message appended 's' to str(timedelta), which already
    # renders as 'H:MM:SS.ffffff' — print the elapsed time as-is.
    print('Stored in ' + str(datetime.now() - start))
    if delete:
        print('Deleting local file.')
        os.remove(file_name)
        print('Deleted.')


def get_from_s3(file_name, file_output=None):
    """Download an object from the configured S3 bucket to a local file.

    Parameters
    ----------
    file_name : str
        Key of the object in ``AWS_BUCKET``.
    file_output : str, optional
        Local path to write to. Defaults to *file_name* itself, which keeps
        single-argument callers (the ``downfile`` CLI path) working.

    Raises
    ------
    IOError
        If the key does not exist in the bucket.
    """
    if file_output is None:
        file_output = file_name
    start = datetime.now()
    print('Getting ' + file_name + ' from S3...')
    conn = boto.connect_s3(AWS_ACCESS_KEY, AWS_SECRET_KEY)
    try:
        bucket = conn.get_bucket(AWS_BUCKET)
        key = bucket.get_key(file_name)
        if key is None:
            # BUG FIX: get_key returns None for a missing object; fail with a
            # clear error instead of an opaque AttributeError below.
            raise IOError('Key not found in bucket: ' + file_name)
        key.get_contents_to_filename(file_output)
    finally:
        # Close the connection even when the download fails.
        conn.close()
    # BUG FIX: drop the stray 's' suffix — str(timedelta) is already 'H:MM:SS'.
    print('Retrieved in ' + str(datetime.now() - start))

def sync_experiments_upload(weights=False):
    """Sync the local ``experiments/`` folder up to S3 via the aws CLI.

    Parameters
    ----------
    weights : bool
        When True, also sync ``*.hdf5`` weight files; otherwise exclude them.
    """
    # FIX: removed the redundant function-local `import subprocess` (already
    # imported at module level) and dropped shell=True — the old code passed a
    # single shell string inside a list, which only worked by accident and is
    # shell-injection prone. An argument list with shell=False is safe.
    cmd = ["aws", "s3", "sync", "experiments/", "s3://keras-ndsb/experiments/"]
    if not weights:
        cmd += ["--exclude", "*.hdf5"]
    subprocess.call(cmd)


def sync_experiments_download(weights=False):
    """Sync the S3 experiments folder down into local ``experiments/``.

    Parameters
    ----------
    weights : bool
        When True, also sync ``*.hdf5`` weight files; otherwise exclude them.
    """
    # FIX: use an argument list with shell=False instead of a shell string
    # with shell=True — safer and consistent with sync_experiments_upload.
    cmd = ["aws", "s3", "sync", "s3://keras-ndsb/experiments/", "experiments/"]
    if not weights:
        cmd += ["--exclude", "*.hdf5"]
    subprocess.call(cmd)


def upload_all_files_in_folder(folder_path, prefix="y_"):
    """Recursively upload files under *folder_path* whose names match *prefix*.

    Each matching file is stored in S3 under
    ``/<last path component of folder_path>/<file name>``.

    Parameters
    ----------
    folder_path : str
        Root directory to walk recursively.
    prefix : str
        Only files whose names start with this prefix are uploaded.
        Defaults to "y_", the previously hard-coded filter.
    """
    # FIX: renamed loop variables that shadowed the builtins `dir` and `file`.
    folder_name = folder_path.split("/")[-1]
    for subdir, _dirs, file_names in os.walk(folder_path):
        for file_name in file_names:
            if file_name.startswith(prefix):
                file_path = os.path.join(subdir, file_name)
                print(file_path)
                target_path = "/" + folder_name + "/" + file_name
                print(target_path)
                store_to_s3(file_path, target_name=target_path)


def upload_files_to_aws():
    """Upload the filtered contents of every known output folder to S3."""
    base = "/home/ubuntumax/keras-dsb/"
    folders = (
        "train_npy_files",
        "validation_npy_files",
        "CRPS_outputs",
        "model_json",
        "proba_outputs",
    )
    for folder in folders:
        upload_all_files_in_folder(base + folder)

if __name__ == "__main__":
    # CLI:
    #   upfile   <local file> <target key>   upload one file
    #   downfile <key> [<local output>]      download one file
    print(sys.argv)
    if len(sys.argv) > 1:
        if sys.argv[1] == 'upfile':
            store_to_s3(file_name=sys.argv[2], target_name=sys.argv[3], delete=False)
        elif sys.argv[1] == 'downfile':
            # BUG FIX: get_from_s3 requires an output path, so the old
            # one-argument call always raised TypeError. Use an optional
            # fourth argv as the output path, defaulting to the key itself.
            output = sys.argv[3] if len(sys.argv) > 3 else sys.argv[2]
            get_from_s3(sys.argv[2], output)
        else:
            print('Specify "downfile" or "upfile" and a filename.')
    else:
        print('Specify "downfile" or "upfile" and a filename.')
