#!/usr/bin/python

from model.data import Task, Client, Workunit, Session
from sqlalchemy import desc
from redis import Redis
import json

class Manager:
    """Server-side dispatcher for a distributed hash-cracking system.

    Registers clients and tasks in the database and hands out bounded
    "workunits" (slices of a task's markov keyspace) to clients.  Requests
    arrive as JSON messages popped from Redis lists (see run()).
    """

    def __init__(self, redis_keys):
        """Open a database session and a Redis connection.

        redis_keys -- iterable of Redis list names to block-pop requests
                      from in run().
        """
        self.session = Session()
        self.redis = Redis()
        self.redis_keys = redis_keys

    def add_client(self, _generated_id, _hashes_per_second):
        """Register a new client with the database, return its database id."""
        client = Client(hashes_per_second=_hashes_per_second,
                        generated_id=_generated_id,
                        current_workunit_id=0)
        self.session.add(client)
        self.session.commit()
        return client.id

    def add_task(self, _hash, _priority=0, _salt="", _hash_type="",
                 _min_length=0, _max_length=0, _markov_low=0,
                 _markov_high=0):
        """Register a new cracking task with the database."""
        task = Task(priority=_priority, complete=False, salt=_salt,
                    hash_type=_hash_type, hash=_hash, plaintext="",
                    min_length=_min_length, max_length=_max_length,
                    markov_lowest=_markov_low, markov_highest=_markov_high)
        # TODO: determine number of markov combinations to try (subprocess
        #       will be needed), and set task.markov_possibles in the db
        self.session.add(task)
        self.session.commit()

    def assign_workload(self, _client_id):
        """Create a new workunit for the given client and return the json to
           send to the client; return None if no workload can be assigned
           (unknown client, or no open task with unassigned keyspace)."""
        # Retrieve client from database
        client = self.session.query(Client).filter_by(id=_client_id).first()
        if client is None:
            return None

        # Size the slice so the client takes roughly the desired wall-clock
        # time, based on its benchmarked speed.
        # TODO: the 300-second target needs to be set from config
        number_to_check = 300 * client.hashes_per_second

        # First incomplete task that has not been completely parted out.
        # NOTE(review): original code crashed on an undefined `priority`
        # name; ascending Task.priority is assumed here (priority defaults
        # to 0) -- confirm intended sort direction.
        task = (self.session.query(Task)
                .filter_by(complete=False, all_assigned=False)
                .order_by(Task.priority)
                .first())
        if task is None:
            return None

        # What is the farthest keyspace point already assigned for this task?
        # TODO: I'm only considering markov right now, need to consider
        #       incremental mode again
        old_unit = (self.session.query(Workunit)
                    .filter_by(task_id=task.id)
                    .order_by(desc(Workunit.markov_end))
                    .first())
        _markov_start = 0 if old_unit is None else old_unit.markov_end

        # Clamp the final slice to the end of the keyspace (the original
        # code set the end to 0 here, yielding an inverted range) and mark
        # the task as fully parted out.
        if _markov_start + number_to_check >= task.markov_possibles:
            _markov_end = task.markov_possibles
            task.all_assigned = True
        else:
            _markov_end = _markov_start + number_to_check

        # Create workunit, write to db, get json and return
        unit = Workunit(task_id=task.id, client_id=_client_id, complete=False,
                        markov_start=_markov_start, markov_end=_markov_end)
        self.session.add(unit)
        self.session.commit()
        return unit.to_client()

    def complete(self, task_id, plaintext=None):
        """Mark a task complete when its keyspace is exhausted or when a
           solution is found; record the plaintext if one was supplied."""
        task = self.session.query(Task).filter_by(id=task_id).first()
        if task is None:
            # Unknown task id: nothing to mark (was an AttributeError).
            return
        # was `task.comlete` -- the typo meant tasks were never marked done
        task.complete = True
        if plaintext is not None:
            task.plaintext = plaintext
        self.session.commit()

    def run(self):
        """Serve requests forever: each Redis message is a JSON triple
           [reply_key, method_name, args]; the named method is invoked and
           its result stored (as JSON) under reply_key."""
        while True:
            # blpop returns (list_name, payload); only the payload matters.
            msg = self.redis.blpop(self.redis_keys)
            key, func, args = json.loads(msg[1])
            # was getattr(self, "func") -- looked up the literal string
            result = getattr(self, func)(*args)
            self.redis.set(key, json.dumps(result))
