
import subprocess
import multiprocessing
import time
import os
import re
import Queue
from threading  import Thread, Lock
import signal
try:
    import json #@UnresolvedImport
except:
    import simplejson as json #@UnresolvedImport
from SimpleXMLRPCServer import SimpleXMLRPCServer
from SimpleXMLRPCServer import SimpleXMLRPCRequestHandler
from xmlrpclib import ServerProxy
import autoDL
import file_stats
import config
from operator import itemgetter

# Guards all reads/writes of the shared job list below.
global jobs_lock
jobs_lock = Lock()
# In-memory list of job dicts; persisted to bitflux_jobs.json by savejobs().
global jobs
jobs = []
# NOTE(review): job_num appears unused in this file -- confirm before removing.
global job_num
job_num = 0
# XML-RPC proxy to the local aria2c process; assigned during startup below.
aria2c_xmlrpc_server = None
# Maximum number of simultaneously active downloads enforced by PollDownloads.
max_jobs_active = 1

def savejobs(new_job_list = None):
    """Persist the global job list to bitflux_jobs.json.

    If new_job_list is given it replaces the global jobs list before
    saving.  The jobs lock is held for the whole read-modify-write so a
    concurrent mutation cannot produce a torn JSON file.
    """
    global jobs
    # BUG FIX: the old acquire()/release() pair leaked the lock (and the
    # open file handle) if json.dump raised mid-write; "with" guarantees
    # release/close on every exit path.
    with jobs_lock:
        if new_job_list is not None:
            jobs = new_job_list
        with open("bitflux_jobs.json", "w") as f:
            json.dump(jobs, f, indent=4)

def loadjobs():
    """Load persisted jobs from bitflux_jobs.json and resume downloads.

    Stale aria2 GIDs from the previous run are discarded (they do not
    survive an aria2c restart), then jobs that were running when the
    engine shut down are restarted; a job that fails to restart is
    marked 'paused'.
    """
    global jobs
    if not os.path.exists("bitflux_jobs.json"):
        return
    # "with" releases the lock and closes the file even if json.load
    # raises on a corrupt file (the old code leaked both).
    with jobs_lock:
        with open("bitflux_jobs.json", "r") as f:
            jobs = json.load(f)
        for a_job in jobs:
            a_job.pop("aria2_id", None)
    # startjob() takes the lock itself (via savejobs), so restart jobs
    # outside the critical section to avoid deadlock.
    for a_job in jobs:
        if a_job['status'] == 'start' or a_job['status'] == 'active':
            if not startjob(a_job):
                with jobs_lock:
                    a_job['status'] = 'paused'
    
class PollDownloads(Thread):
    """Background thread that polls aria2 and enforces the job queue.

    Every 5 seconds it refreshes job stats via getjobs(), re-adds jobs
    stuck in aria2's 'waiting' state so our own queue ordering stays
    authoritative, and starts the next 'queued' job when fewer than
    max_jobs_active downloads are running.
    """
    def __init__(self, aria2c_xmlrpc_server):
        Thread.__init__(self)
        self.aria2c_xmlrpc_server = aria2c_xmlrpc_server

    def run(self):
        global jobs
        while True:
            try:
                getjobs()
                sorted_jobs = sorted(jobs, key=itemgetter('queue_id'))
                jobs_running = 0
                for a_job in sorted_jobs:
                    if a_job['status'] == 'active':
                        jobs_running += 1
                    if a_job['status'] == 'waiting':
                        # aria2 queued it internally; stop and re-start so
                        # our engine controls the ordering, not aria2.
                        stopjob(a_job)
                        startjob(a_job)
                if jobs_running < max_jobs_active:
                    for a_job in jobs:
                        if a_job['status'] == 'queued':
                            startjob(a_job)
                            break
            except Exception as e:
                config.logging.critical("Exception was thrown in PollDownloads!")
                # BUG FIX: was the bare module-level `logging`, which is
                # only imported near the bottom of the file; use the
                # shared config logger like the rest of this module.
                config.logging.exception(e)
            time.sleep(5)

def getETA(total, already_dled, speed):
    """Return a human-readable ETA string ("5s", "1m 30s", "2h 2m").

    total         -- total download size (bytes)
    already_dled  -- bytes downloaded so far
    speed         -- current download speed (bytes/sec)

    Returns the integer 0 when any input is zero (no estimate possible);
    callers store the value as-is, so that sentinel is kept.
    """
    if float(already_dled) == 0 or float(speed) == 0 or float(total) == 0:
        # cannot calculate at this time
        return 0
    remaining = float(total) - float(already_dled)
    eta_seconds = remaining / float(speed)
    m, s = divmod(eta_seconds, 60)
    m = int(m)
    s = int(s)
    if m <= 0:
        return str(s) + "s"
    if m > 60:
        # BUG FIX: divmod(eta_seconds, 3600) leaves the remainder in
        # SECONDS; the old code displayed that remainder as minutes
        # (e.g. 7320s rendered as "2h 120m").  Convert it to minutes.
        h, rem = divmod(eta_seconds, 3600)
        return str(int(h)) + "h " + str(int(rem) // 60) + "m"
    return str(m) + "m " + str(s) + "s"

def getjobs():
    """Refresh every job dict with live stats from aria2 and return jobs.

    Jobs without an aria2_id are left untouched.  A failed RPC call for
    a completed job clears its stale aria2_id.
    """
    for a_job in jobs:
        if 'aria2_id' not in a_job:
            continue
        try:
            stats = aria2c_xmlrpc_server.aria2.tellStatus(
                    str(a_job['aria2_id']),
                    ['gid','downloadSpeed','completedLength','status','connections']
                    )
        except Exception:
            with jobs_lock:
                config.logging.warn("Failed to retrieve info from aria2 for job: " + str(a_job['id']))
                if a_job['status'] == "complete":
                    # aria2 forgets finished downloads; drop the dead GID.
                    a_job.pop("aria2_id", None)
            continue
        # BUG FIX: "with" releases the lock even if a key lookup below
        # raises; the old acquire()/release() pair leaked it.
        with jobs_lock:
            for key in stats:
                # Don't let aria2's 'paused' overwrite our 'queued' marker.
                if key == "status" and stats['status'] == 'paused' and a_job['status'] == 'queued':
                    continue
                a_job[key] = stats[key]
            a_job['eta'] = getETA(a_job['totalLength'], a_job['completedLength'], a_job["downloadSpeed"])
    return jobs

def startjob(job):
    """Start (or resume) a download.

    job may be a job dict, or an id as int/str which is resolved against
    the global job list.  Returns True on success, False otherwise.
    """
    if isinstance(job, (str, int)):
        job_id = int(job)
        job = None
        for a_job in jobs:
            if a_job['id'] == job_id:
                job = a_job
        if job is None:
            return False
    if 'aria2_id' in job:
        # Already known to aria2 -- just unpause it.
        try:
            aria2c_xmlrpc_server.aria2.unpause(job['aria2_id'])
        except Exception:
            config.logging.warn("Failed to unpause job: " + str(job['id']))
            return False
    else:
        new_download_options = {"dir":job["local_basedir"],
                        "out":job["filename"],
                        "http-passwd":config.PASSWORD,
                        "http-user":config.USERNAME,
                        "file-allocation":'none',
                        "continue":True
                        }
        # BUG FIX: the try/except around addUri was commented out, so an
        # aria2 RPC failure propagated and crashed the caller instead of
        # being reported as a failed start.
        try:
            aria2_id = aria2c_xmlrpc_server.aria2.addUri([job['url']], new_download_options)
        except Exception:
            config.logging.warn("Failed to add new download for job: " + str(job['id']))
            return False
        if aria2_id != -1:
            job['aria2_id'] = aria2_id
        else:
            config.logging.info("Did not get valid id back from aria2: " + job['filename'])
            return False
    savejobs()
    return True

def stopjob(job):
    """Pause a download.

    job may be a job dict, or an id as int/str which is resolved against
    the global job list.  Returns True on success, False if the job is
    unknown or aria2 refuses to pause it.
    """
    if isinstance(job, (str, int)):
        wanted = int(job)
        job = None
        for candidate in jobs:
            if candidate['id'] == wanted:
                job = candidate
        if job is None:
            return False
    if 'aria2_id' not in job:
        # Not registered with aria2 -- just mark it paused locally.
        jobs_lock.acquire()
        job['status'] = 'paused'
        jobs_lock.release()
        savejobs()
        return True
    try:
        aria2c_xmlrpc_server.aria2.pause(job['aria2_id'])
    except:
        # log failure to file
        config.logging.warn("Failed to pause job: " + str(job['id']))
        return False
    return True

def newjob(job_dict):
    """Register a new download job.

    Jobs that need auto-renaming or lack a known total size are handed
    off to a file_stats.JobHelper thread (which calls back into this
    engine later).  Otherwise the job is appended to the queue and then
    started, queued or paused according to its requested status suffix.
    Always returns True.
    """
    needs_helper = (job_dict['autorename']
                    or 'totalLength' not in job_dict
                    or job_dict['totalLength'] == 0)
    if needs_helper:
        # Direct handling is unsupported for these; delegate to a helper
        # thread that resolves the details asynchronously.
        helper = file_stats.JobHelper(job_dict, "http://localhost:8000/bitfluxengine")
        helper.daemon = True
        helper.start()
        return True
    job_dict['tmpfilename'] = job_dict["filename"] + ".aria2"
    with jobs_lock:
        job_dict['id'] = len(jobs)
        job_dict['queue_id'] = job_dict['id']
        jobs.append(job_dict)
    savejobs()
    requested = job_dict['status']
    if requested.endswith("Start"):
        startjob(job_dict)
    if requested.endswith("Queue"):
        with jobs_lock:
            job_dict['status'] = 'queued'
        savejobs()
    if requested.endswith("Stop"):
        with jobs_lock:
            job_dict['status'] = 'paused'
        savejobs()
    return True

def deletejob(job, with_data = False):
    """Remove one or more jobs, optionally deleting downloaded data.

    job may be a single id (int/str) or an iterable of ids.  Each job is
    removed from aria2 first; the aria2 control (.aria2) file is always
    deleted, the payload only when with_data is True.  Remaining jobs
    are renumbered so ids stay dense.  Returns False if any aria2
    removal failed for a non-complete job.
    """
    global jobs
    job_ids_to_del = []
    if isinstance(job, (str, int)):
        job_ids_to_del.append(int(job))
    else:
        for an_id in job:
            job_ids_to_del.append(int(an_id))
    jobs_to_remove = [a_job for a_job in jobs if a_job['id'] in job_ids_to_del]

    was_successful = True
    jobs_lock.acquire()
    try:
        for a_job in jobs_to_remove:
            if 'aria2_id' in a_job:
                try:
                    aria2c_xmlrpc_server.aria2.remove(str(a_job['aria2_id']))
                except Exception:
                    # A completed job may already be gone from aria2;
                    # anything else is a real failure.
                    if a_job['status'] != 'complete':
                        config.logging.warn("Failed to remove job: " + str(a_job['id']))
                        was_successful = False
                        continue
            if with_data:
                # BUG FIX: paths were built from a stale loop variable
                # ("job", the last element of jobs), so deleting several
                # jobs removed files under the wrong base directory.
                full_path = a_job["local_basedir"] + a_job["filename"]
                if os.path.exists(full_path):
                    os.remove(full_path)
            # remove temp file no matter what
            tmp_file = a_job["local_basedir"] + a_job["tmpfilename"]
            if os.path.exists(tmp_file):
                os.remove(tmp_file)

            jobs.remove(a_job)
        # Renumber so job ids remain 0..len-1.
        for new_id, a_job in enumerate(jobs):
            a_job['id'] = new_id
    finally:
        # Lock is released even if an os.remove raises unexpectedly.
        jobs_lock.release()
    savejobs()
    return was_successful

def status():
    """Report component health: aria2c, the auto-downloader thread and
    the polling thread.  Returns a dict mapping component -> state.
    """
    response = {}
    try:
        session = aria2c_xmlrpc_server.aria2.getSessionInfo()
    except:
        session = None
    # NOTE: the 'arai2c' key is misspelled but kept for client compat.
    response['arai2c'] = 'online' if session is not None else 'offline'
    response['autodler'] = 'online' if autoDLthread.isAlive() else 'offline'
    response['polling'] = 'online' if poll_downloads.isAlive() else 'offline'
    return response

def upqueue(job):
    """Move a job one position up (towards the head) of the queue.

    job may be a job dict or an id (int/str).  Returns False when the
    job is unknown, already at the head, or has no adjacent neighbour.
    """
    if isinstance(job, (str, int)):
        job_id = int(job)
        job = None
        for a_job in jobs:
            if a_job['id'] == job_id:
                job = a_job
        if job is None:
            # BUG FIX: logging job['id'] here raised TypeError (job is None).
            config.logging.warn("Cannot upqueue job with id: " + str(job_id))
            return False
    if (job['queue_id'] - 1) < 0:
        config.logging.warn("Cannot upqueue job with id: " + str(job['id']))
        return False
    increase_id = None
    decrease_id = None
    for a_job in sorted(jobs, key=itemgetter('queue_id')):
        if a_job['queue_id'] == (job['queue_id'] - 1):
            increase_id = a_job['queue_id']
            decrease_id = job['queue_id']
    if increase_id is None:
        # BUG FIX: with a gap in queue_ids the old code hit a NameError
        # on an unbound increase_id; treat "no neighbour" as a no-op.
        return False
    with jobs_lock:
        for a_job in jobs:
            if a_job['queue_id'] == increase_id:
                a_job['queue_id'] += 1
            elif a_job['queue_id'] == decrease_id:
                a_job['queue_id'] -= 1
    return True

def downqueue(job):
    """Move a job one position down (away from the head) of the queue.

    job may be a job dict or an id (int/str).  Returns False when the
    job is unknown, already at the tail, or has no adjacent neighbour.
    """
    if isinstance(job, (str, int)):
        job_id = int(job)
        job = None
        for a_job in jobs:
            if a_job['id'] == job_id:
                job = a_job
        if job is None:
            # BUG FIX: logging job['id'] here raised TypeError (job is None).
            config.logging.warn("Cannot downqueue job with id: " + str(job_id))
            return False
    if (job['queue_id'] + 1) > len(jobs):
        config.logging.warn("Cannot downqueue job with id: " + str(job['id']))
        return False
    increase_id = None
    decrease_id = None
    for a_job in sorted(jobs, key=itemgetter('queue_id')):
        if a_job['queue_id'] == (job['queue_id'] + 1):
            increase_id = job['queue_id']
            decrease_id = a_job['queue_id']
    if increase_id is None:
        # BUG FIX: the last job passed the bound check above (off-by-one)
        # and then hit a NameError because it has no neighbour below it;
        # treat "no neighbour" as a refused move.
        return False
    with jobs_lock:
        for a_job in jobs:
            if a_job['queue_id'] == increase_id:
                a_job['queue_id'] += 1
            elif a_job['queue_id'] == decrease_id:
                a_job['queue_id'] -= 1
    return True

def queuejob(job):
    """Put a job into the 'queued' state, pausing it first if necessary.

    job may be a job dict or an id (int/str).  Returns False when the
    job is unknown or could not be paused.
    """
    if isinstance(job, (str, int)):
        job_id = int(job)
        job = None
        for a_job in jobs:
            if a_job['id'] == job_id:
                job = a_job
        if job is None:
            # BUG FIX: logging job['id'] here raised TypeError (job is None).
            config.logging.warn("Cannot queue job with id: " + str(job_id))
            return False
    if job['status'] != 'paused':
        if not stopjob(job):
            config.logging.warn("Failed to queue job with id: " + str(job['id']))
            return False
    with jobs_lock:
        job['status'] = 'queued'
    savejobs()
    return True

# Command line for the background aria2c download daemon; all further
# control happens over its XML-RPC interface on localhost:6800.
aria2c_cmd_list = [ "aria2c",
                   "-q",
                   '--max-tries=0',
                   "--enable-rpc=true",
                   "--rpc-user=" + config.ARIA2C_XMLRPC_USER,
                   "--rpc-passwd=" + config.ARIA2C_XMLRPC_PASSWORD,
                   "--check-certificate=false",
                   "--always-resume=false",
                   "--max-connection-per-server=10",
                   "--split=10",
                   "--max-concurrent-downloads=10",
                   "--min-split-size=10M"
                   ]
if config.ARIA2C_LOGGING:
    aria2c_cmd_list.append("--log=aria2c.log")
aria2c_xmlrpc_proc = subprocess.Popen(aria2c_cmd_list, stdout=subprocess.PIPE)
# Give aria2c a moment to bind its RPC port before we connect.
time.sleep(1)
# Credentials are embedded in the URL per xmlrpclib's basic-auth syntax.
aria2c_xmlrpc_server = ServerProxy('http://' + config.ARIA2C_XMLRPC_USER + ':' + config.ARIA2C_XMLRPC_PASSWORD + '@localhost:6800/rpc')
# Daemon thread that keeps job state in sync with aria2 (see PollDownloads).
poll_downloads = PollDownloads(aria2c_xmlrpc_server)
poll_downloads.daemon = True
poll_downloads.start()

# Restrict to a particular path.
class RequestHandler(SimpleXMLRPCRequestHandler):
    # Only XML-RPC requests posted to /bitfluxengine are accepted;
    # requests to any other path get a 404.
    rpc_paths = ('/bitfluxengine',)

# Restore persisted jobs (and restart any that were active) before the
# RPC server starts accepting requests.
loadjobs()
# Create server
server = SimpleXMLRPCServer(("0.0.0.0", 8000),
                            requestHandler=RequestHandler,logRequests=False)
server.register_introspection_functions()
# Expose the engine's job-management API over XML-RPC.
#register_instance
server.register_function(getjobs)
#register_instance
server.register_function(startjob)
server.register_function(stopjob)
server.register_function(newjob)
server.register_function(deletejob)
server.register_function(queuejob)
server.register_function(status)
server.register_function(upqueue)
server.register_function(downqueue)
# Auto-download rule management lives in the autoDL module.
server.register_function(autoDL.new_autodl_entry,"newautodl")
server.register_function(autoDL.get_autodls,"getautodls")
server.register_function(autoDL.delete_autodl_entry,"deleteautodl")
# Daemon thread that applies auto-download rules via our own RPC API.
autoDLthread = autoDL.AutoDLer("http://localhost:8000/bitfluxengine")
autoDLthread.daemon = True
autoDLthread.start()
# File logger for this module.  NOTE(review): this is separate from
# config.logging used throughout the functions above -- presumably
# intentional, but worth confirming.
import logging
logger = logging.getLogger('xmlrpcserver')
hdlr = logging.FileHandler('bitflux.log')
formatter = logging.Formatter("%(asctime)s  %(levelname)s  %(message)s")
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(logging.INFO)

try:
    # Run the server's main loop
    # NOTE(review): fatal() is used for an informational message --
    # presumably so it shows up regardless of configured log level.
    config.logging.fatal("Bitflux engine started")
    server.serve_forever()
except:
    # Bare except is deliberate here: on ANY shutdown cause (including
    # KeyboardInterrupt) persist the job list and kill the aria2c child.
    config.logging.fatal("Exception caught, shutting down...")
    savejobs()
    aria2c_xmlrpc_proc.kill()
    
    
