#import sys
#sys.path.append("/home/dexen/workspace/dexen_on_google")


import os
import sys
import time
import types
import cPickle
from zipfile import ZipFile

from multiprocessing import Process, Value, Queue

from dexen.system import util, constants
from dexen.system.client.looper import loop_task
from dexen.system.client.rpc_api import FrontendAPI
from dexen.user.libs import server_api



""" GLOBALS """
tasks = {}
script_to_id = {}
frontend_api = None
url = ""
id = 0

def _zip_schema(dir, zip_name):
    prev_path = os.getcwd()
    os.chdir(dir)   
    if os.path.exists(zip_name):
        os.remove(zip_name)
    z = ZipFile(zip_name, "w")
      
    #print "zipping files ...."
    for root, dirs, files in os.walk("."):
        #print root, dirs, files, "***\n\n"
        #if root in exception_list:
        #    continue
        if root.find(".svn") != -1:
            continue       
        for file in files:
            if file != zip_name:
                #print os.path.join(root, file)
                z.write(os.path.join(root, file))  
    z.close()
    os.chdir(prev_path)

def _download_schema(schema_name):
    """Fetch *schema_name* from the server and unpack it locally.

    Downloads the zipped schema via the frontend API, unzips it into
    TEMP_SCHEMA_DIR/<schema_name>, and records the server timestamp in
    the schema's meta file so later syncs can detect staleness.
    """
    schema = frontend_api.get_schema(schema_name)  # (zip_data, timestamp)
    prev_path = os.getcwd()
    os.chdir(constants.TEMP_SCHEMA_DIR)
    try:
        if not os.path.exists(schema_name):
            os.mkdir(schema_name)
        # unzip clears everything inside the target folder first
        util.unzip(schema[0], schema_name + "/")

        # Persist the server-side timestamp into the meta file.
        meta = open(os.path.join(schema_name, constants.METAFILE), "wb")
        try:
            cPickle.dump(schema[1], meta)
        finally:
            meta.close()
    finally:
        # Restore the caller's working directory even on error.
        os.chdir(prev_path)

def _remove_old_entries(location, cur_entries, exc_entries=None):
    """Remove old entries.
    
    A helper method that browses into the 'location' removes all the entries 
    that are not in 'cur_entries' and 'exc_entries'. 
    
    """
    if not os.path.exists(location):
        print "The old entry path does not exist."
        return
    
    if not exc_entries:
        exc_entries = [".svn", "__init__.py", "__init__.pyc", constants.METAFILE]

    prev_path = os.getcwd()
    os.chdir(location)  
    entries = os.listdir(".")  
    for entry in entries:
        if entry not in cur_entries and entry not in exc_entries:
            util.rmentry(entry)
    os.chdir(prev_path)

def _remove_old_outputs():
    """Remove output folders whose schema or population no longer exists.

    First deletes every schema output folder that is absent from the
    database; then, for each surviving schema, deletes the population
    output folders whose population is gone from the database.
    """
    valid_schemas = frontend_api.get_schema_names()
    _remove_old_entries(constants.OUTPUT_DIR, valid_schemas)

    for name in valid_schemas:
        valid_pops = get_population_names(name)
        schema_out_dir = os.path.join(constants.OUTPUT_DIR, name)
        _remove_old_entries(schema_out_dir, valid_pops)
            
def _remove_old_schemas():
    """Drop the schema folders in 'temp' that no longer exist in the database."""
    current = frontend_api.get_schema_names()
    _remove_old_entries(constants.TEMP_SCHEMA_DIR, current)

def _synch_schema_temp(schema_name):
    """Ensure the local temp copy of *schema_name* matches the server.

    Downloads the schema if it is missing locally, or if the locally
    recorded timestamp differs from the server's (i.e. the copy is
    stale).  The working directory is always restored, even on error.
    """
    prev_path = os.getcwd()
    os.chdir(constants.TEMP_SCHEMA_DIR)
    try:
        if schema_name not in os.listdir("."):
            _download_schema(schema_name)
        else:  # NOTE(review): assumes the entry is a folder — confirm
            server_ts = frontend_api.get_schema_timestamp(schema_name)
            meta = open(os.path.join(schema_name, constants.METAFILE), "rb")
            try:
                local_ts = cPickle.load(meta)
            finally:
                meta.close()
            if server_ts != local_ts:  # local copy is outdated
                _download_schema(schema_name)
            # otherwise the local copy is fresh; nothing to do
    finally:
        os.chdir(prev_path)


def _synch_schema_outputs(schema_name):
    """Ensure the output folder of *schema_name* matches the server's schema.

    If the schema output folder exists but its recorded timestamp differs
    from the server's, the folder is wiped and recreated.  The current
    server timestamp is then written into the folder's meta file.
    """
    path = os.path.join(constants.OUTPUT_DIR, schema_name)
    schema_ts = frontend_api.get_schema_timestamp(schema_name)

    if os.path.exists(path):
        meta = open(os.path.join(path, constants.METAFILE), "rb")
        try:
            local_ts = cPickle.load(meta)
        finally:
            meta.close()
        if schema_ts != local_ts:  # the schema is outdated
            util.rmentry(path)  # removes the root folder as well
            os.mkdir(path)
    else:
        os.mkdir(path)

    # Dump the creation time of the schema into the schema output folder.
    meta = open(os.path.join(path, constants.METAFILE), "wb")
    try:
        cPickle.dump(schema_ts, meta)
    finally:
        meta.close()
        
def _synch_pop_outputs(schema_name, pop_name):
    """Ensure the output folder of a population matches the server.

    Wipes and recreates the population output folder when its recorded
    creation time differs from the server's, guarantees the image/model
    subfolders exist, and writes the current creation time into the
    folder's meta file.
    """
    path = os.path.join(constants.OUTPUT_DIR, schema_name, pop_name)
    pop_ts = frontend_api.get_population_creation_time(schema_name, pop_name)

    if os.path.exists(path):
        meta = open(os.path.join(path, constants.METAFILE), "rb")
        try:
            local_ts = cPickle.load(meta)
        finally:
            meta.close()
        if pop_ts != local_ts:  # the population is obsolete
            util.rmentry(path)  # removes the root folder as well
            os.mkdir(path)
    else:
        os.mkdir(path)

    # Make sure the image and model output subfolders exist.
    image_path = os.path.join(path, constants.IMAGE_DIRNAME)
    model_path = os.path.join(path, constants.MODEL_DIRNAME)
    if not os.path.exists(image_path):
        os.mkdir(image_path)
    if not os.path.exists(model_path):
        os.mkdir(model_path)

    # Dump the creation time of the population into its output folder.
    meta = open(os.path.join(path, constants.METAFILE), "wb")
    try:
        cPickle.dump(pop_ts, meta)
    finally:
        meta.close()


def connect_to_server(_url):
    """Connect to server.

    Creates the FrontendAPI (which sets up an rpyc connection to the
    server), then purges local schema folders and output (image, model)
    folders whose schema no longer exists in the database.
    """
    global frontend_api, url
    url = _url
    frontend_api = FrontendAPI(_url)
    _remove_old_schemas()
    _remove_old_outputs()

def reset_all_databases():
    """Drop every table in 'config.db' and in the dynamic databases ('pop.db')."""
    frontend_api.reset_all_databases()

def create_new_schema(schema_name, schema_folder):
    """Create a new schema.

    Zips the contents of *schema_folder*, uploads the archive (together
    with its last-modified time) to the server under *schema_name*, and
    removes the temporary local zip file afterwards.
    """
    schema_dir = os.path.join(constants.SCHEMAS_DIR, schema_folder)
    zip_name = schema_name + ".zip"
    _zip_schema(schema_dir, zip_name)
    file_path = os.path.join(schema_dir, zip_name)
    try:
        f = open(file_path, "rb")
        try:
            data = f.read()
        finally:
            f.close()
        # Last-modified time in seconds since the Epoch.
        last_modified_time = os.path.getmtime(file_path)
    finally:
        os.remove(file_path)  # always clean up the temporary zip file
    frontend_api.set_schema(schema_name, data, last_modified_time)

def _drain_queue(q):
    """Drain up to the currently-queued number of items from *q*.

    Snapshots qsize first so a concurrent producer cannot keep this loop
    spinning forever; stops early if the queue empties in the meantime.
    """
    items = []
    for _ in range(q.qsize()):
        if q.qsize() == 0:
            break
        items.append(q.get())
    return items

def getTaskState(id):
    """Return a snapshot dict for task *id*.

    The dict holds the task id, a "Running"/"Stopped" state string, the
    accumulated console output (joined into one string) and the list of
    pending graph output items.  Draining the queues empties them, so
    each call returns only the output produced since the previous call.
    """
    global tasks
    task = {}
    task["id"] = id

    if tasks[id]["state"].value == constants.RUNNING:
        task["state"] = "Running"
    else:
        task["state"] = "Stopped"

    task["console_out"] = "".join(_drain_queue(tasks[id]["console_out"]))
    task["graph_out"] = _drain_queue(tasks[id]["graph_out"])

    return task

"""
TO BE CALLED PERIODICALLY BY FLEX
"""
def getTaskStates():
    global tasks
    result = []
    for item in tasks.items():
        id = item[0]
        result.append(getTaskState(id))
    return result


def run_task(schema_name, pop_name, script_name, run_type):
    """Start a task process for *script_name* and register it globally.

    Syncs the local schema/output folders with the server, resolves the
    script's file and entry-function names via the frontend API, then
    spawns a multiprocessing Process running loop_task.  The task is
    registered under a fresh id in the module-level 'tasks' and
    'script_to_id' maps; returns a small dict describing the task for
    the Flex GUI.
    """
    global url, id, tasks
    
    # Bring the local schema copy and the output folders up to date first.
    _synch_schema_temp(schema_name)    
    _synch_schema_outputs(schema_name)
    _synch_pop_outputs(schema_name, pop_name)
    
    
    file_name, function_name = frontend_api.\
                               get_script_file_function_names(schema_name, pop_name,
                                                          script_name)
    
    # Shared running/stopped flag plus unbounded queues for the child's output.
    state = Value('i', constants.RUNNING)  
    console_out = Queue(-1)
    graph_out = Queue(-1)
    
    task_process = Process(target=loop_task, 
                           args=(schema_name, pop_name, script_name, file_name, 
                                 function_name, url, run_type, state, 
                                 console_out, graph_out, id))
    
    task_process.start()
    print "PID is", task_process.pid

    # Register the task under a human-readable identifier (used by the CLI).
    script_to_id["id:%s, schema:%s, population:%s, script:%s" 
                 %(str(id), schema_name, pop_name, script_name)] = id

    # Record everything needed to query/stop the task later.
    tasks[id] = {}
    tasks[id]["schema_name"] = schema_name
    tasks[id]["pop_name"] = pop_name
    tasks[id]["script_name"] = script_name
    tasks[id]["id"] = id
    tasks[id]["state"] = state
    tasks[id]["process"] = task_process
    tasks[id]["console_out"] = console_out
    tasks[id]["graph_out"] = graph_out
    
    """
    FOR FLEX GUI
    """
    task = {
        "schema_name" : schema_name,
        "pop_name" : pop_name,
        "script_name" : script_name,
        "id" : id
    }
    # Advance the module-wide id counter for the next task.
    id += 1
    return task

#Used by CLI
def stopScript(script_identifier):
    """Stop the task registered under *script_identifier* (CLI entry point)."""
    task_id = script_to_id.pop(script_identifier)
    tasks[task_id]["state"].value = constants.STOPPED

#Used by GUI
def stop_task(id):
    """Stop task *id*: flag its shared state and notify the server."""
    entry = tasks[id]
    entry["state"].value = constants.STOPPED
    frontend_api.stop_script(entry["schema_name"], entry["pop_name"],
                             entry["script_name"])
    return id

def run_config_function(schema_name, pop_name, func_name):
    """Run config function.

    Registers the new population with the server, then calls the function
    *func_name* defined in the schema's 'config.py'.  Finally the
    individual table is created in the dynamic 'pop.db'.
    """
    creation_ts = time.time()  # timestamp of population creation
    frontend_api.set_population(schema_name, pop_name, creation_ts)
    server_api._set_attributes(url, schema_name, pop_name)

    module_path = "dexen.user.schemas." + schema_name + ".config"
    config_module = __import__(module_path, fromlist=[""])
    config_func = config_module.__dict__.get(func_name)
    config_func()
    frontend_api.set_individual_table(schema_name, pop_name)

def get_schema_names():
    """Return the names of all schemas stored on the server."""
    names = frontend_api.get_schema_names()
    return names

def get_population_names(schema_name):
    """Return the population names of *schema_name* from the server."""
    names = frontend_api.get_population_names(schema_name)
    return names

def getScriptNames(schema_name, pop_name):
    """Return the script names registered for the given population."""
    names = frontend_api.get_script_names(schema_name, pop_name)
    return names

def getLocalSchemaFolderNames():
    """Return the schema folder names found locally under SCHEMAS_DIR.

    Skips svn metadata and package bookkeeping entries; only directories
    are returned.  Unlike the original, this avoids os.chdir entirely so
    the working directory can never be left unrestored on error.
    """
    exception_entries = [".svn", "__init__.py", "__init__.pyc"]
    base = constants.SCHEMAS_DIR
    return [entry for entry in os.listdir(base)
            if entry not in exception_entries
            and os.path.isdir(os.path.join(base, entry))]

def getLocalSchemaFileNames(schema_name):
    """Return the file names inside the local temp copy of *schema_name*.

    Syncs the temp copy with the server first.  Skips svn metadata,
    package bookkeeping entries and the '.dexen' meta entry; only plain
    files are returned.  Avoids os.chdir so the working directory can
    never be left unrestored on error.
    """
    _synch_schema_temp(schema_name)
    exception_entries = [".svn", "__init__.py", "__init__.pyc", ".dexen"]
    base = os.path.join(constants.TEMP_SCHEMA_DIR, schema_name)
    return [entry for entry in os.listdir(base)
            if entry not in exception_entries
            and os.path.isfile(os.path.join(base, entry))]

def _update_running_script_list():
    """Drop entries for scripts that have stopped from 'script_to_id'.

    Fix: the original read an undefined global 'states' (NameError at
    runtime); the task state actually lives in tasks[id]["state"].
    """
    for key in list(script_to_id.keys()):
        if tasks[script_to_id[key]]["state"].value == constants.STOPPED:
            script_to_id.pop(key)
            
def getLocalRunningScripts():
    """Return the identifiers of the scripts still running locally."""
    # Fix: the original called the misspelled '_update_runnig_script_list',
    # which does not exist (NameError at runtime).
    _update_running_script_list()
    return script_to_id.keys()

def assign_pop_name(schema_name, file_name=None, func_name=None):
    """Generate a fresh population name for *schema_name*.

    The base name comes from *file_name* (with its '.py' suffix stripped)
    or, failing that, from *func_name*.  The returned name is
    '<base>_pop_<n>' where n is one greater than the highest numeric
    suffix among the schema's existing populations sharing the base name.

    Raises ValueError if neither file_name nor func_name is given
    (the original crashed with an opaque NameError in that case).
    """
    if file_name is not None:
        name = file_name[:-3]  # strip out the '.py' suffix
    elif func_name is not None:
        name = func_name
    else:
        raise ValueError("either file_name or func_name must be given")

    highest = 0
    for pop_name in get_population_names(schema_name):
        if pop_name.startswith(name):
            suffix = pop_name.split("_")[-1]
            if suffix.isdigit():  # ignore names without a numeric suffix
                highest = max(highest, int(suffix))
    return name + "_" + "pop" + "_" + str(highest + 1)

def getAllPopulationNames():
    """Return every "schema, population" pair known to the server."""
    names = []
    for schema in get_schema_names():
        for pop in get_population_names(schema):
            names.append(schema + ", " + pop)
    return names

def runPopulation(schema_name, pop_name):
    """Launch every script registered for the given population."""
    for script in getScriptNames(schema_name, pop_name):
        run_type = frontend_api.get_script_run_type(schema_name, pop_name, script)
        run_task(schema_name, pop_name, script, run_type)

def _getModuleFunctions(module_path):
    result = []
    module = __import__(module_path, fromlist=[""])
    for entry in dir(module):
        if isinstance(module.__dict__.get(entry), types.FunctionType) and\
           not entry.startswith("_"):
            result.append(module.__dict__.get(entry))
    return result

def _getModuleFunctionNames(module_path):
    """Return the names of the public functions defined in *module_path*."""
    return [func.__name__ for func in _getModuleFunctions(module_path)]

def _getModuleFunction(module_path, func_name):
    module = __import__(module_path, fromlist=[""])
    return getattr(module, func_name)

def getAutoStartFunctionNames(schema_name):
    """Return the public function names in the schema's autostart module."""
    module_path = "dexen.user.schemas." + schema_name + ".autostart"
    return _getModuleFunctionNames(module_path)

def run_batch_tasks(schema_name, pop_name, func_name):
    """Run the batch of scripts returned by the schema's autostart function.

    Each entry yielded by the autostart function is a (script_name,
    run_type) pair; a task is started for each and the task descriptors
    are returned as a list.
    """
    autostart_path = "dexen.user.schemas." + schema_name + ".autostart"
    batch_func = _getModuleFunction(autostart_path, func_name)
    started = []
    for script in batch_func():
        started.append(run_task(schema_name, pop_name, script[0], script[1]))
    return started

def getConfigFunctionNames(schema_name):
    """Return the public function names in the schema's config module."""
    module_path = "dexen.user.schemas." + schema_name + ".config"
    return _getModuleFunctionNames(module_path)

def deletePopulation(schema_name, pop_name):
    """Delete the given population from the server's database."""
    frontend_api.delete_population(schema_name, pop_name)

def main():
    """Demo entry point: connect to a local server and upload a sample schema."""
    # Fix: removed the dead 'pass' statement that preceded live code.
    connect_to_server("localhost")
    create_new_schema("packing_boxes_v1", "packing_boxes_v1")
    
if __name__ == "__main__":
    main()
                