__author__ = 'D60'
import os
import shutil
import subprocess
import tempfile
from time import gmtime, strftime

import boto
import boto.s3.connection
import celery
from celery import Celery, task
# NOTE(review): credentials are hardcoded placeholders -- move them to
# environment variables or a config file before deploying for real.
access_key = 'abc'
secret_key = '123'

# Connection to an S3-compatible endpoint (plain HTTP host at 10.1.35.91).
# This runs at import time, so merely importing this module performs
# network I/O.
conn = boto.connect_s3(
        aws_access_key_id = access_key,
        aws_secret_access_key = secret_key,
        host = '10.1.35.91',
        is_secure=False,               # endpoint speaks plain HTTP, no SSL
        calling_format = boto.s3.connection.OrdinaryCallingFormat(),
        )
# Bucket all task uploads go into. Presumably create_bucket returns the
# existing bucket when it is already owned by these credentials -- TODO
# confirm against this endpoint's behavior.
bucket = conn.create_bucket('mybucket')
# Celery app: RabbitMQ serves as both the broker and the result backend.
app = Celery('tasks',backend='amqp://guest:guest@localhost:5672//', broker='amqp://guest:guest@localhost:5672//')

# Registered on the module's configured ``app`` (the bare ``@task``
# decorator does not bind to this app instance and was removed in
# Celery 4+).
@app.task
def add(x, y):
    """Return the sum of *x* and *y* (trivial smoke-test task)."""
    return x + y

@app.task
def s3up(filepath):
    """Segment a video file with the external ``mp4to`` tool and upload
    every produced piece to the module-level S3 ``bucket``.

    Objects are stored under a key prefix equal to the input's base
    filename without extension, and each one is made publicly readable.

    :param filepath: path to the source video file.
    :returns: the string ``"ok"`` on success.
    :raises RuntimeError: if the ``mp4to`` subprocess exits non-zero.
    """
    # Race-free unique work directory. (The previous timestamp-named
    # /tmp dir omitted %H from its format and could collide between
    # concurrent workers started in the same minute/second.)
    tmpdir = tempfile.mkdtemp(prefix='s3up-')
    try:
        rc = subprocess.call(["mp4to", filepath, tmpdir])
        # A failed segmentation used to be silently ignored; surface it.
        if rc != 0:
            raise RuntimeError("mp4to failed with exit code %d" % rc)
        # Key prefix = base filename minus extension. splitext is safe
        # for dot-less names, where split('.')[-2] raised IndexError.
        key_prefix = os.path.splitext(os.path.basename(filepath))[0]
        for filename in os.listdir(tmpdir):
            keyname = key_prefix + "/" + filename
            # Upload and mark public-read in one call instead of a
            # second get_key/set_canned_acl round-trip.
            bucket.new_key(keyname).set_contents_from_filename(
                os.path.join(tmpdir, filename), policy='public-read')
    finally:
        # Always reclaim the work directory, even on failure.
        shutil.rmtree(tmpdir, ignore_errors=True)
    return "ok"
