package cz.cuni.amis.pogamut.episodic.episodes;

import cz.cuni.amis.pogamut.episodic.decisions.Node;
import cz.cuni.amis.pogamut.episodic.decisions.NodeType;
import cz.cuni.amis.pogamut.episodic.memory.Parameters;
import cz.cuni.amis.pogamut.episodic.schemas.*;
import java.io.Serializable;
import java.util.Map.Entry;
import java.util.*;

/**
 * The <code>EpisodeNode</code> is what represents the memory of an action
 * being executed. Episode node is memorized equivalent of <code>Intention</code>,
 * <code>Action</code> or <code>AtomicAction</code> from decision tree.
 * <p>
 * It can have several subnodes. A node without subnodes is an atomic action.
 * Node without parent is a top-level goal. Actions and intentions may
 * have associated affordance slots that have to be filled in order to
 * successfully finish the action.
 * <p>
 * A tree made of <code>EpisodeNode</code>s is what makes the basis of any
 * <code>Episode</code>.
 *
 * @author Michal Cermak
 */
public class EpisodeNode implements Serializable, Comparable<EpisodeNode> {
    /**
     * Determines if a de-serialized file is compatible with this class.
     *
     * Maintainers must change this value if and only if the new version
     * of this class is not compatible with old versions. See Sun docs
     * for <a href=http://java.sun.com/products/jdk/1.1/docs/guide
     * /serialization/spec/version.doc.html> details. </a>
     *
     * Not necessary to include in first version of the class, but
     * included here as a reminder of its importance.
     */
    private static final long serialVersionUID = 1L;

    /**
     * ID of this node. Used as an ID of the vertex representing
     * this node when visualizing the owning Chronobag.
     */
    private final int id;

    /**
     * Name of the intention, action or atomic action that is
     * represented by this node. Same as the name of the associated node.
     */
    private final String name;

    /**
     * Equivalent of this node in the decision tree. It is a reference to
     * the node that was chosen to be performed and a way to access
     * the associated <code>SchemaEpisodicNode</code>.
     */
    public final Node associatedNode;

    /**
     * Reference to the parent <code>EpisodeNode</code> in the episode tree.
     * Only a top-level goal (the root of an episode) has no parent.
     */
    private EpisodeNode parent = null;

    /**
     * Reference to the <code>Episode</code> structure representing the whole
     * episode this node belongs to. Via this reference it is possible to
     * access outer memories.
     */
    private Episode root;

    /**
     * Map of <code>EpisodeNode</code>s that are subnodes of this node
     * in the episode tree, indexed by the child's name. A node without
     * children is an atomic action.
     */
    HashMap<String, EpisodeNode> children = new HashMap<String, EpisodeNode>();

    /**
     * Per-episode reference to the <strong>direct child</strong> node that was
     * executed <strong>first</strong>. Keyed by episode id (see
     * <code>addChildNode</code>, which uses <code>root.idEpisode</code> as the key).
     * Used to provide the correct temporal sequence of child nodes.
     */
    private Map<Integer, EpisodeNode> firstChild = new HashMap<Integer, EpisodeNode>();

    /**
     * Per-episode reference to the <strong>direct child</strong> node that was
     * executed <strong>last</strong>. Keyed by episode id. Used to
     * provide the correct temporal sequence of child nodes.
     */
    private Map<Integer, EpisodeNode> lastChild = new HashMap<Integer, EpisodeNode>();

    /**
     * Per-episode reference to the <strong>sibling</strong> node (node with same parent)
     * that was executed <strong>first after</strong> this node. Keyed by episode id.
     * Used to provide the correct temporal sequence of child nodes.
     */
    private Map<Integer, EpisodeNode> successors = new HashMap<Integer, EpisodeNode>();

    /**
     * Per-episode reference to the <strong>sibling</strong> node (node with same parent)
     * that was executed <strong>last before</strong> this node. Keyed by episode id.
     * Used to provide the correct temporal sequence of child nodes.
     */
    private Map<Integer, EpisodeNode> predecessors = new HashMap<Integer, EpisodeNode>();

    /**
     * All affordance slots attached to this node, indexed by slot type.
     * A node carries at most one slot per type (see <code>addSlot</code>).
     */
    HashMap<String, ObjectSlot> slots = new HashMap<String, ObjectSlot>();

    /**
     * Indicator whether this node was correctly finished.
     * A node is finished when the agent decides to throw it out of short-term
     * memory, because it was completed or could not be completed.
     * <p>
     * Nodes in finished episodes can contain unfinished nodes if they were
     * interrupted in an unusual way (top-level goal suddenly disappeared
     * from short-term memory).
     */
    public boolean finished = false;

    /**
     * Indicator whether this node was successfully finished.
     */
    public boolean succeeded = false;

    /**
     * Score of a node. Used to determine if it is okay to forget the node
     * (recomputed in <code>calculateScore</code>).
     */
    private double score;

    // Counters over the subtree below this node; both are incremented for every
    // ancestor when a descendant is added (see addChildNode) and decremented
    // on deletion (see deleteNode).
    int numberOfSubNodes = 0;
    int numberOfSubNodesWithObjects = 0;

    // Schema usage count of the associated node at creation time, plus one
    // (set once in the constructor; feeds into calculateScore).
    private final int countAtCreation;

    // Timestamps of the first and most recent execution of this node
    // (set/refreshed by addChildNode on the parent).
    long timeStart;
    long timeFinish;

    // NOTE(review): presumably set when this node is merged into a sibling
    // (mergeWith is not visible in this chunk) -- confirm before relying on it.
    boolean consumed = false;

    /**
     * Creates a new episode node.
     * <p>
     * The node's unique id is drawn from the episode's id generator and its
     * initial score is set to the configured maximum. The creation-time schema
     * count is captured from the associated decision node's schema node
     * (0 when there is no associated node, 1 when the associated node has
     * no schema node yet).
     *
     * @param   _name   Name of the node.
     * @param   _parent Reference to parent <code>EpisodeNode</code>.
     * @param   _root   Reference to parent <code>Episode</code>.
     * @param   associated  Reference to associated decision node.
     */
    public EpisodeNode(String _name, EpisodeNode _parent, Episode _root, Node associated) {
        this.name = _name;
        this.parent = _parent;
        this.root = _root;
        this.associatedNode = associated;
        this.score = _root.mem.parameters.MAX_NODE_SCORE;
        this.id = _root.idGen.getNewId();

        if (associated == null) {
            this.countAtCreation = 0;
        } else if (associated.getAssociatedNode() == null) {
            this.countAtCreation = 1;
        } else {
            this.countAtCreation = associated.getAssociatedNode().getSingleCount(0, 0) + 1;
        }
    }

    /**
     * Copy constructor: clones the scalar state of another node into a new
     * node attached to the given parent and episode.
     * <p>
     * Only name, associated decision node, counters, flags and timestamps are
     * copied; children, sequence maps and slots are NOT copied here — that is
     * done by <code>createCopy</code>, which calls this constructor.
     * A fresh id is drawn from the target episode's id generator.
     *
     * @param other   Node whose state is copied.
     * @param _parent Parent of the new node (may be null for a root).
     * @param e       Episode the new node belongs to.
     */
    public EpisodeNode(EpisodeNode other, EpisodeNode _parent, Episode e) {
        name = other.name;
        parent = _parent;
        root = e;
        associatedNode = other.associatedNode;
        id = e.idGen.getNewId();
        countAtCreation = other.countAtCreation;
        numberOfSubNodes = other.numberOfSubNodes;
        numberOfSubNodesWithObjects = other.numberOfSubNodesWithObjects;
     //   assert (numberOfCommonSubNodesWithObjects(this) == numberOfSubNodesWithObjects+1);
        succeeded = other.succeeded;
        finished = other.finished;
        timeStart = other.timeStart;
        timeFinish = other.timeFinish;
    }

    /**
     * Returns a <code>String</code> object representing this
     * <code>EpisodeNode</code>'s info. It is used to provide detailed
     * information about a node when it is invoked from
     * the visualization structures.
     *
     * @return  a string representation of the value of this object.
     */
    @Override
    public String toString() {
        String newline = System.getProperty("line.separator");

        // StringBuilder replaces the original repeated String concatenation
        // (which also started with a dead `str = ""` assignment).
        StringBuilder str = new StringBuilder();
        str.append("Name: ").append(name);
        str.append(newline).append("Started: ").append(timeStart);
        str.append(newline).append("Finished: ").append(timeFinish);
        str.append(newline).append("Succeeded: ").append(succeeded);
        str.append(newline).append("Associated node: ");
        if (getAssociatedNode() == null) {
            str.append("null");
        } else {
            try {
                str.append(getAssociatedNode().getId()).append(" ").append(getAssociatedNode().getName());
            } catch (Exception e) {
                // Best-effort: the associated node lives in another structure and
                // may be in an inconsistent state; report and continue.
                System.err.println("Could not retrieve id of associated node (node id: "+getId()+").");
                System.err.println(e.getMessage());
            }
        }
        return str.toString();
    }

    /**
     * Marks specified episodic node as finished. The node has to be in the episode
     * subtree represented by the current node. The remaining path to the node is
     * stored in the trace parameter.
     * <p>
     * Operates in two phases: "search" walks down the trace looking for the
     * named node; once found, "wipe" mode consumes the rest of the trace and
     * marks the node at its end as finished.
     *
     * @param   node    Name of a node that was finished.
     * @param   trace   Names of trace nodes that lead to the finished node,
     * beginning with a child of the current node. Should be empty if the node
     * to be finished is the current node. Entries are consumed (removed) once
     * wipe mode starts.
     * @param   _succeeded  True if node was finished successfully.
     * @param   wipe        Indicates whether we're in wipe mode already.
     * @param   wipeStart   Index into the trace from which wiping should start.
     * @return  Returns true if the node was successfully located and marked.
     * @throws  Exception if the node is not part of the trace, or the trace
     * names a child that does not exist in the tree.
     */
    public boolean finishNode(String node, ArrayList<String> trace, boolean _succeeded, boolean wipe, int wipeStart) throws Exception {
        // Recursion stop: the trace has been fully consumed down to wipeStart.
        if (trace.size() == wipeStart) {
            if (!wipe) {
                // Funny story, it appears that node has never been a part of trace. Err out.
                throw new Exception("Error: trying to finish a node that is not in the current trace.");
            }
            finished = true;
            succeeded = _succeeded;
            return true;
        }
        if (wipe) {
            // Wipe mode: the target has been found higher up; keep consuming the
            // trace and recurse until it is exhausted.
            return descendInto(trace.remove(wipeStart), node, trace, _succeeded, true, wipeStart);
        }
        // Search mode: still looking for the node indicated by args.
        if (node.equals(trace.get(wipeStart))) {
            // Found, start wiping from here.
            return descendInto(trace.remove(wipeStart), node, trace, _succeeded, true, wipeStart);
        }
        // Not found yet; descend without consuming the trace entry.
        return descendInto(trace.get(wipeStart), node, trace, _succeeded, false, wipeStart + 1);
    }

    /**
     * Resolves a child by name and recurses into it, propagating the result.
     * Fix: the original ignored the recursive return value (always returning
     * true) and threw a bare NullPointerException on an unknown child name.
     */
    private boolean descendInto(String childName, String node, ArrayList<String> trace,
            boolean _succeeded, boolean wipe, int wipeStart) throws Exception {
        EpisodeNode child = getChild(childName);
        if (child == null) {
            throw new Exception("Error: trace refers to unknown child '" + childName
                    + "' of node '" + name + "'.");
        }
        return child.finishNode(node, trace, _succeeded, wipe, wipeStart);
    }

    /**
     * Getter method for the <code>id</code> variable.
     *
     * @return The unique ID of this node; also used as the ID of the vertex
     * representing this node in the Chronobag View visualization.
     */
    public int getId() {
        return this.id;
    }

    /**
     * Calculates the number of common episodic subnodes and common objects used
     * in slots on these nodes.
     *
     * @param other Root of the other subtree (this node being the root of
     * the first one).
     * @return  Count of common nodes plus common slot objects; 0 when the two
     * roots have different names.
     */
    public int numberOfCommonSubNodesWithObjects(EpisodeNode other) {
        // Roots must match by name, otherwise the subtrees share nothing.
        if (!other.name.equals(name)) {
            return 0;
        }
        int sum = 1; // this node itself

        // Recurse into children present in both subtrees.
        for (EpisodeNode child : children.values()) {
            EpisodeNode otherChild = other.getChild(child.name);
            if (otherChild != null) {
                sum += child.numberOfCommonSubNodesWithObjects(otherChild);
            }
        }

        // Count objects shared by slots of the same type on both nodes.
        for (ObjectSlot slot : slots.values()) {
            ObjectSlot otherSlot = other.slots.get(slot.getType());
            if (otherSlot != null) {
                Collection<ObjectNode> ours = slot.getUsedObjects();
                Collection<ObjectNode> theirs = otherSlot.getUsedObjects();
                for (ObjectNode obj : ours) {
                    if (theirs.contains(obj)) sum++;
                }
            }
        }
        return sum;
    }

    /**
     * Getter method for the <code>name</code> variable.
     *
     * @return Name of the intention, action or atomic action represented by
     * this node; identical to the name of the associated decision node.
     */
    public String getName() {
        return this.name;
    }

    /**
     * Getter method for the <code>parent</code> variable.
     *
     * @return Returns a reference to the parent <code>EpisodeNode</code> in the episode tree.
     * Only a top-level goal (root of an episode) does not have a parent.
     */
    public EpisodeNode getParent() {
        return parent;
    }

    /**
     * Getter method for the <code>associatedNode</code> variable.
     *
     * @return Returns a reference to the equivalent of this node in the decision
     * tree. It is a reference to the node that was chosen to be performed
     * and a way to access the associated <code>SchemaEpisodicNode</code>.
     * May be null.
     */
    public Node getAssociatedNode() {
        return associatedNode;
    }

    /**
     * Getter method for the <code>slots</code> variable.
     *
     * @return Returns a live view of all affordance slots attached to this node.
     */
    public Collection<ObjectSlot> getObjectSlots() {
        return slots.values();
    }

    /**
     * Method returning one specified <code>objectSlot</code> attached to this
     * episodic node.
     *
     * @param type  Affordance type identifying the slot.
     * @return Returns the affordance slot of the given type attached to this node.
     * Returns <code>null</code> if no slot of the specified type is attached to this node.
     */
    public ObjectSlot getObjectSlot(String type) {
        return slots.get(type);
    }

    /**
     * This method adds a new subnode to the list of node's children.
     * If such node already exists there, it only refreshes its finish time.
     * It is responsible for initializing all the necessary variables
     * of a newly created node and for maintaining the correct temporal
     * sequence of child nodes of a parent node.
     *
     * @param   name    Name of a subnode to be added.
     * @param   time    Time of execution of this node.
     * @return  True if new node was created. False if subnode with such
     * name already existed in the <code>children</code> map.
     */
    public boolean addChildNode(String name, long time) {
        // Re-execution of an existing child only updates its finish timestamp.
        if (children.containsKey(name)) {
            children.get(name).timeFinish = time;
            return false;
        }
        // Resolve the matching decision-tree node for the child, if any.
        Node associated = null;
        if (associatedNode != null) associated = (Node)associatedNode.getSubNode(name);
    /*    if (associatedNode.getAssociatedNode() == null) {
            SchemaEpisodeNode s = root.getParentChronobag().getMemory().getSchemaBag().getSchemaENode(name);
            associatedNode.setAssociatedSchemaNode(s);
        }*/
        EpisodeNode node = new EpisodeNode(name, this, this.root, associated);
        node.timeStart = time;
        node.timeFinish = time;

        // Every ancestor (including this node) gains one node in its subtree counters.
        EpisodeNode parentNode = this;
        while (parentNode != null) {
            parentNode.numberOfSubNodes++;
            parentNode.numberOfSubNodesWithObjects++;
            parentNode = parentNode.parent;
        }
        if (root.getParentChronobag() != null) {
            root.getParentChronobag().numberOfEpisodeNodes++;
            root.getParentChronobag().newNodeAdded = true;
        }

        // Maintain the temporal sequence for the current episode (keyed by
        // root.idEpisode): first/last child of this node plus
        // predecessor/successor links between siblings.
        if (children.isEmpty()) firstChild.put(root.idEpisode, node);
        children.put(name, node);
        if (lastChild.containsKey(root.idEpisode) && lastChild.get(root.idEpisode) != null) {
            lastChild.get(root.idEpisode).successors.put(root.idEpisode, node);
            node.predecessors.put(root.idEpisode, lastChild.get(root.idEpisode));
        }
        lastChild.put(root.idEpisode, node);

        return true;
    }

    /**
     * Method that returns the specified <code>EpisodeNode</code> that is a direct
     * subnode of this node in the episode tree. A node cannot have two children
     * with identical names, so there will not be a conflict between several
     * children.
     *
     * @param   name    Specifies the name of the subnode to be returned.
     * @return  Returns the direct child of this node with the specified name, null
     * if there is no such child.
     */
    public EpisodeNode getChild(String name) {
        // HashMap.get already returns null for a missing key; the previous
        // containsKey guard performed a redundant second lookup.
        return children.get(name);
    }

    /**
     * Lists all <code>EpisodeNode</code>s that are direct subnodes of this node
     * in the episode tree. A node without children is an atomic action.
     *
     * @return  A live collection view of all direct subnodes of this node.
     */
    public Collection<EpisodeNode> getChildrenNodes() {
        return this.children.values();
    }

    /**
     * Collects the <code>SchemaEpisodeNode</code>s associated with the direct
     * children of this node. Children with no associated decision node, or
     * whose decision node has no schema node, are skipped.
     *
     * @return A new collection of the children's schema nodes.
     */
    public Collection<SchemaEpisodeNode> getChildrenSchemaNodes() {
        ArrayList<SchemaEpisodeNode> result = new ArrayList<SchemaEpisodeNode>();
        for (EpisodeNode child : getChildrenNodes()) {
            Node decision = child.associatedNode;
            if (decision == null) {
                continue;
            }
            SchemaEpisodeNode schema = decision.getAssociatedNode();
            if (schema != null) {
                result.add(schema);
            }
        }
        return result;
    }

    /**
     * Getter method for the <code>root</code> variable.
     *
     * @return The <code>Episode</code> structure representing the whole episode
     * this node belongs to; provides access to the outer memories.
     */
    public Episode getEpisodeRoot() {
        return this.root;
    }

    /**
     * Attaches a new affordance slot to this episodic node. Slots are indexed
     * by type and a node carries at most one slot per type, so the call is a
     * no-op when a slot of the given type already exists.
     *
     * @param   type    String representing the type of the affordance needed
     * to fill the new slot.
     */
    public void addSlot(String type) {
        if (slots.containsKey(type)) {
            return;
        }
        ObjectSlot slot = new ObjectSlot(root.idGen.getNewId(), type, this);
        slots.put(type, slot);
        // Signal the owning chronobag (if any) that its structure changed.
        if (root.getParentChronobag() != null) {
            root.getParentChronobag().newNodeAdded = true;
        }
    }

    /**
     * Fills the specified slot with a specified item. <strong>Both
     * the <code>ObjectSlot</code> and the <code>ObjectNode</code>
     * representing the item must already be created when calling
     * this method.</strong>
     *
     * @param   type    String specifying the type of affordance used.
     * @param   item    Name of an item that will fill the object slot.
     */
    public void fillSlot(String type, String item) {
        ObjectSlot slot = slots.get(type);
        ObjectNode obj = root.getParentChronobag().objectNodes.get(item);
        // NOTE(review): assumes the slot exists (created via addSlot) and the
        // item is registered in the chronobag; a missing slot throws NPE in
        // addObject below -- confirm callers uphold that precondition.

        root.getParentChronobag().newNodeAdded = true;
        // Fix: the result was previously stored in an unused local variable.
        slot.addObject(obj, true);
    }

    /**
     * Getter method for the <code>successors</code> variable.
     *
     * @return Map, keyed by episode id, of the <strong>sibling</strong> node
     * (node with same parent) that was executed <strong>first after</strong>
     * this node in each episode. Used to provide the correct temporal sequence
     * of child nodes. The returned map is the live internal structure.
     */
    public Map<Integer, EpisodeNode> getSuccessor() {
        return successors;
    }

    /**
     * Getter method for the <code>predecessors</code> variable.
     *
     * @return Map, keyed by episode id, of the <strong>sibling</strong> node
     * (node with same parent) that was executed <strong>last before</strong>
     * this node in each episode. Used to provide the correct temporal sequence
     * of child nodes. The returned map is the live internal structure.
     */
    public Map<Integer, EpisodeNode> getPredecessor() {
        return predecessors;
    }

    /**
     * Getter method for the <code>firstChild</code> variable.
     *
     * @return Map, keyed by episode id, of the <strong>direct child</strong>
     * node that was executed <strong>first</strong> in each episode. Used to
     * provide the correct temporal sequence of child nodes. The returned map
     * is the live internal structure.
     */
    public Map<Integer, EpisodeNode> getFirstChild() {
        return firstChild;
    }

    /**
     * Getter method for the <code>score</code> variable.
     *
     * @return Returns the current score of this node (last value computed by
     * <code>calculateScore</code>, or the initial maximum). Used to determine
     * whether the node can be deleted and forgotten.
     */
    public double getScore() {
        return score;
    }

    /**
     * Method used to compute the new score of a node. Used to determine whether
     * the node can be forgotten: the score is a weighted sum of derivability,
     * schema usage counts, tree position, attractivity and slot scores.
     *
     * @param   isPresent   True when the owning chronobag is a "present"
     * chronobag; only then is leaf derivability actually computed.
     * @return  Returns the new score value of this node (also stored in
     * the <code>score</code> field).
     */
    public double calculateScore(boolean isPresent) {
        // Without an associated decision node the node cannot be scored; make
        // it maximally forgettable.
        if (associatedNode == null) {
            score = 0;
            return score;
        }

        // For forgetting purposes, we use the global schema.
        double actualCount = root.mem.schemas.getSchemaENode(associatedNode.getName()).getSingleCount(0, 0);
//        double actualCount = associatedNode.getAssociatedNode().getSingleCount();
        double creationCount = countAtCreation;
    //    age is not necessary because all nodes in chronobag has same age - age determines K
    //    double age = root.getParentChronobag().getAge().getMinAge();
        double level = root.getParentChronobag().getLevel();
        boolean bottom = children.isEmpty();
        double attractivity = associatedNode.getAttractivity();
        double maxSlotScore = 0;
        // Node is automatically derivable if it has a child.
        double derivability = 1;
        if (root.getParentChronobag().end > root.mem.parameters.EPISODE_DERIVE_FORGET_THRESHOLD) {
            // If this Chronobag ended more than THRESHOLD days ago, don't derive nodes.
            derivability = 0;
        } else {
            if (bottom && isPresent) {
                // Only call this for leaves, and only for present chronobags.
                derivability = nodeDerivability();
            }
        }
        // The node's slot score is the maximum over all scores of all its slots.
        for (ObjectSlot s : slots.values()) {
            for (Double d : s.getScore()) {
                if (d > maxSlotScore) maxSlotScore = d;
            }
            //if (s.getScore() > maxSlotScore) maxSlotScore = s.getScore();
        }

        score = 20;// baseline offset (historically 0)
        score += (1 - derivability) * root.mem.parameters.NODE_SCORE_DERIVABILITY_WEIGHT;
        score += Math.max(0, root.mem.parameters.NODE_SCORE_ACTUAL_COUNT_WEIGHT - actualCount + 1);
        score += Math.max(0, root.mem.parameters.NODE_SCORE_CREATION_COUNT_WEIGHT - 3 * creationCount + 1);
        if (!bottom) score += root.mem.parameters.NODE_SCORE_BOTTOM_WEIGHT;
        score += attractivity / 100 * root.mem.parameters.NODE_SCORE_ATTRACTIVITY_WEIGHT;
        score += maxSlotScore / root.mem.parameters.MAX_NODE_SCORE * root.mem.parameters.NODE_SCORE_AFFORDANCES_WEIGHT;

        return score;
    }

    /**
     * Creates a deep copy of the subtree rooted at this node, attached to the
     * given parent inside episode <code>e</code>. Children, first/last-child
     * maps, sibling predecessor/successor links and slots (with their objects)
     * are all reproduced on the copies.
     *
     * @param parent    Parent of the copied node in the target episode (may be null).
     * @param e         Episode the copy belongs to (supplies fresh ids).
     * @param temporary Passed through to slot object registration; marks copies
     * made for temporary, on-the-fly episodes.
     * @return The root of the copied subtree.
     */
    public EpisodeNode createCopy(EpisodeNode parent, Episode e, boolean temporary) {
        assert validateNode(this): id;

        EpisodeNode n = new EpisodeNode(this, parent, e);
        EpisodeNode newChild;

        // Copy all children first; sequence maps can only be wired up once the
        // copies exist.
        Queue<EpisodeNode> q = new LinkedList<EpisodeNode>();
        q.addAll(children.values());
        EpisodeNode child;
        while (!q.isEmpty()) {
            child = q.poll();
            newChild = child.createCopy(n, e, temporary);
            n.children.put(newChild.name, newChild);
        }
        // Rebuild the per-episode first/last-child references to point at the
        // copied children (matched by name).
        for (Map.Entry<Integer, EpisodeNode> entry : firstChild.entrySet()) {
            n.firstChild.put(entry.getKey(), n.children.get(firstChild.get(entry.getKey()).name));
        }
        for (Map.Entry<Integer, EpisodeNode> entry : lastChild.entrySet()) {
            n.lastChild.put(entry.getKey(), n.children.get(lastChild.get(entry.getKey()).name));
        }
        // Rebuild the sibling successor/predecessor chains among the copies.
        for (EpisodeNode ch : children.values()) {
            for (Map.Entry<Integer, EpisodeNode> entry : ch.successors.entrySet()) {
                n.children.get(ch.getName()).successors.put(entry.getKey(), n.children.get(entry.getValue().getName()));
                n.children.get(entry.getValue().getName()).predecessors.put(entry.getKey(), n.children.get(ch.getName()));
            }
        }

        // Recreate the slots and re-register their objects on the copy.
        for (ObjectSlot s : slots.values()) {
            n.addSlot(s.getType());
            for (ObjectNode o : s.getUsedObjects()) {
                // Note: This is a potential leak. Objects are global, and backreference the nodes that use them,
                // whether themselves permanent or not.
                n.slots.get(s.getType()).addObject(o, false, temporary);
            }
        }

        assert validateNode(children.values()) : id;
        return n;
    }

    /**
     * Deletes the node and all its subnodes. Recursively calls full delete
     * on all children and then deletes the node itself.
     *
     * @param   episodeDeleted  True if this is part of the process of deleting
     * an entire episode (passed to deleteNode as skipChecks, so per-node
     * validation is skipped), False otherwise.
     */
    public void fullDelete(boolean episodeDeleted) {
        // Iterate over a snapshot: deleting children mutates the children map.
        Collection<EpisodeNode> nodes = new HashSet<EpisodeNode>();
        nodes.addAll(children.values());
        for (EpisodeNode n : nodes) {
            n.fullDelete(episodeDeleted);
        }
        deleteNode(episodeDeleted, false);
    }

    /**
     * Deletes this single node with validation checks enabled and without
     * thorough slot emptying. Equivalent to <code>deleteNode(false, false)</code>.
     */
    public void deleteNode() {
        deleteNode(false, false);
    }

    /**
     * Deletes this node from the episode tree: relocates its slot objects to
     * the parent's "Other" slot, splices its children into the sibling
     * sequence, re-parents or merges them into the grandparent's children,
     * and updates subtree counters on all ancestors. A root node (no parent)
     * is only deleted once it has no children.
     *
     * @param skipChecks When true, the validateNode assertions are skipped
     * (used when an entire episode is being torn down).
     * @param thorough   When true, non-"Other" slots are emptied before
     * deletion; needed for temporary on-the-fly episodes to avoid leaking
     * object backreferences.
     */
    public void deleteNode(boolean skipChecks, boolean thorough) {
        if (!skipChecks) {
            assert validateNode(this) : id;
            assert validateNode(parent) : id;
            assert validateNode(children.values()) : id;
        }

        // Deleting slots -> objects will be moved to the "Other" slot of the parent node.
        String otherType = "Other";
        // Iterate over a snapshot: deleteSlot mutates the slots map.
        Collection<ObjectSlot> col = new HashSet<ObjectSlot>();
        col.addAll(slots.values());
        for (ObjectSlot s : col) {
            if (s.getType().equals(otherType)) continue;
            if (thorough) {
                // Thorough deletion is necessary in case of temporary episodes
                // created on-the-fly. If we don't empty the slot, this node will
                // remain permanently linked with it and will leak memory.
                s.emptySlot();
            }
            s.deleteSlot();

        }
        // Relocate the "Other" slot's objects up to the parent, if there is one.
        ObjectSlot s = slots.get(otherType);
        if (s != null && parent != null) {
            parent.addSlot(otherType);
            ObjectSlot ps = parent.getObjectSlot(otherType);
            for (ObjectNode o : s.getUsedObjects()) {
                if (ps.addObject(o, true)) {
                    EpisodeNode n = this;
                    while (n != null) {
                        //those relocated will be temporarily in both slots. sum will be decreased in emptySlot() method.
                        n.numberOfSubNodesWithObjects++;
                        n = n.getParent();
                    }
                }
            }
            s.emptySlot();
        }
        if (s != null) s.deleteSlot();
        assert (slots.isEmpty());

        if (parent == null) {
            root.deleted = true;
            if (children.isEmpty() && !skipChecks) {
                //if this is last/only node in entire episode
                root.getParentChronobag().deleteEpisode(root);
            }
            return; //we do not delete root unless it has no children
        }

        if (!skipChecks) {
            assert validateNode(this) : id;
            assert validateNode(parent) : id;
            assert validateNode(children.values()) : id;
        }

        //slots are processed, we need to reorganize parent/siblings/children connections

        //three steps are needed:
        // - attach children to their grandfather (this->parent)
        // - merge children with siblings if they are same node
        // - merge children sequence with siblings sequence
        //will do sequence first, because it can be done separately, otherwise we would
        //have to do all three at once and it would probably result in inconsistent state

        //merge children sequence with siblings sequence
        removeSequenceReferences(true);

        //actual moving of nodes + merging when required
        parent.children.remove(name);
        // Iterate over a snapshot: merging/moving mutates the children map.
        Collection<EpisodeNode> oldChildren = new HashSet<EpisodeNode>();
        oldChildren.addAll(children.values());
        for (EpisodeNode child : oldChildren) {
            if (parent.children.containsKey(child.name)) {
                //child exists among sibligs - will merge sibling with child - child will be consumed
                //need to remove references to consumed, need to do it before merge is called
                //because the delete node called from merge might mess other nodes

                //child.removeSequenceReferences(false);
                parent.getChild(child.name).mergeWith(child);
            } else {
                //child does not exist among siblings - will move it one level up
                parent.children.put(child.name, child);
                child.parent = parent;
            }

        }

        // One node (this one) has left every ancestor's subtree.
        EpisodeNode p = parent;
        while (p != null) {
            p.numberOfSubNodes--;
            p.numberOfSubNodesWithObjects--;
            p = p.parent;
        }

        //final validation - once the node is deleted parent and children must remain valid
        if (!skipChecks) {
            assert validateNode(parent) : id;
            assert validateNode(children.values()) : id;
        }
        return;
    }

    /**
     * This is a subroutine of the delete node method.
     * Correctly removes references from predecessors/successors and
     * also removes the node from the first/last child fields of all ancestors.
     * This method does <strong>not</strong> change any of the parent/child references.
     *
     * @param   moveChildrenUp  When <code>false</code>, predecessor will be connected with successor.
     * When <code>true</code> and the node has children, they will be spliced in between
     * predecessor and successor. We may want this when deleting a node.
     */
    private void removeSequenceReferences(boolean moveChildrenUp) {
        if (!children.isEmpty() && moveChildrenUp) {
            // Splice this node's child sequence into the sibling sequence:
            // predecessor -> firstChild ... lastChild -> successor, per episode id.
            for (Map.Entry<Integer, EpisodeNode> entry : predecessors.entrySet()) {
                int key = entry.getKey();
                predecessors.get(key).successors.put(key, firstChild.get(key));
                firstChild.get(key).predecessors.put(key, entry.getValue());
            }
            for (Map.Entry<Integer, EpisodeNode> entry : successors.entrySet()) {
                int key = entry.getKey();
                successors.get(key).predecessors.put(key, lastChild.get(key));
                lastChild.get(key).successors.put(key, entry.getValue());
            }
        } else {
            // No children moving - just connect predecessor with successor directly,
            // dropping the link entirely when the counterpart is missing.
            Iterator<Entry<Integer, EpisodeNode>> it = predecessors.entrySet().iterator();
            while (it.hasNext()) {
                Entry<Integer, EpisodeNode> entry = it.next();
                if (successors.containsKey(entry.getKey())) {
                    entry.getValue().successors.put(entry.getKey(), successors.get(entry.getKey()));
                } else {
                    entry.getValue().successors.remove(entry.getKey());
                }
            }
            it = successors.entrySet().iterator();
            while (it.hasNext()) {
                Entry<Integer, EpisodeNode> entry = it.next();
                if (predecessors.containsKey(entry.getKey())) {
                    entry.getValue().predecessors.put(entry.getKey(), predecessors.get(entry.getKey()));
                } else {
                    entry.getValue().predecessors.remove(entry.getKey());
                }
            }
        }

        // Walk up the ancestor chain and replace any first/last child reference
        // that still points at this node.
        EpisodeNode par = parent;
        while (par != null) {
            Iterator<Entry<Integer, EpisodeNode>> it = par.firstChild.entrySet().iterator();
            while (it.hasNext()) {
                Entry<Integer, EpisodeNode> entry = it.next();
                if (entry.getValue() == this) {
                    if (!children.isEmpty()) {
                        // Our first child takes over as the first sibling.
                        entry.setValue(firstChild.get(entry.getKey()));
                    } else {
                        if (successors.containsKey(entry.getKey())) {
                            // No children -> our successor becomes the first sibling.
                            entry.setValue(successors.get(entry.getKey()));
                        } else {
                            // No children and no successors -> there is no first child.
                            it.remove();
                        }
                    }
                }
            }
            it = par.lastChild.entrySet().iterator();
            while (it.hasNext()) {
                Entry<Integer, EpisodeNode> entry = it.next();
                if (entry.getValue() == this) {
                    if (!children.isEmpty()) {
                        // Our last child takes over as the last sibling.
                        entry.setValue(lastChild.get(entry.getKey()));
                    } else {
                        if (predecessors.containsKey(entry.getKey())) {
                            // No children -> our predecessor becomes the last sibling.
                            entry.setValue(predecessors.get(entry.getKey()));
                        } else {
                            // No children and no predecessors -> there is no last child.
                            it.remove();
                        }
                    }
                }
            } //sequence and first/last child is set
            par = par.parent;
        }

        // This node is now fully detached from the sibling sequence.
        predecessors.clear();
        successors.clear();
    }

    /**
     * Validates every node in the given collection.
     * Short-circuits on the first node that fails validation, in the
     * collection's iteration order.
     *
     * @param col nodes to validate
     * @return {@code true} if all nodes pass {@link #validateNode(EpisodeNode)}
     */
    boolean validateNode(Collection<EpisodeNode> col) {
        return col.stream().allMatch(this::validateNode);
    }

    /**
     * Runs structural consistency checks on a single episode node: ancestor
     * loop detection, associated-node/name agreement, child-parent links,
     * first/last child membership, and the per-episode predecessor/successor
     * sibling chains. The first violation found is reported on stderr.
     *
     * @param n node to validate; {@code null} is considered valid
     * @return {@code true} when all checks pass, {@code false} on the first
     *         detected inconsistency
     */
    boolean validateNode(EpisodeNode n) {
        if (n == null) return true;

        // Ancestor walk: fails if n appears among its own ancestors.
        // NOTE(review): a parent cycle that does not pass through n itself
        // would make this loop non-terminating - assumed not to occur.
        EpisodeNode par = n.getParent();
        while (par != null) {
            if (par == n) {
                System.err.println("Loop in the episode tree: " + n.id);
                return false;
            }
            par = par.parent;
        }

        // BUGFIX: these checks previously read the fields of 'this'
        // (associatedNode, name, parent) rather than those of the node under
        // validation, so calling validateNode on any node other than 'this'
        // silently examined the wrong object. All accesses now go through 'n',
        // consistent with every other check in this method.
        if (n.associatedNode != null) {
            if (!n.associatedNode.getName().equals(n.name)) {
                System.err.println("Associated node name inconsistency: " + n.id);
                return false;
            }

            if (n.associatedNode.parent == null && n.parent != null) {
                System.err.println("Invalid parent: " + n.id);
                return false;
            }
            if (n.associatedNode.parent != null && n.parent == null) {
                System.err.println("Missing parent: " + n.id);
                return false;
            }
        }

        // Every child must point back to n, and n must not be its own child.
        for (EpisodeNode ch : n.children.values()) {
            if (ch.parent != n) {
                System.err.println("Child-Parent inconsistency: " + n.id);
                return false;
            }
            if (ch == n) {
                System.err.println("Self-child inconsistency: " + n.id);
                return false;
            }
        }

        // First/last child references must denote actual children of n.
        if (!n.children.isEmpty()) {
            for (EpisodeNode ch : n.firstChild.values()) {
                if (!n.children.containsValue(ch)) {
                    System.err.println("First child inconsistency: " + n.id);
                    return false;
                }
            }
            for (EpisodeNode ch : n.lastChild.values()) {
                if (!n.children.containsValue(ch)) {
                    System.err.println("Last child inconsistency: " + n.id);
                    return false;
                }
            }
        }

        // Sibling chains must be symmetric and stay under the same parent.
        // NOTE(review): p.parent / n.parent are dereferenced without a null
        // check here - sequenced siblings are assumed to always have parents.
        for (EpisodeNode p : n.predecessors.values()) {
            if (!p.successors.containsValue(n)) {
                System.err.println("Predecessor-Successor inconsistency: " + n.id);
                return false;
            }
            if (p.parent.id != n.parent.id) {
                System.err.println("Predecessor-Parent inconsistency: " + n.id);
                return false;
            }
            if (!p.parent.children.containsValue(p)) {
                System.err.println("Predecessor-Brother inconsistency: " + n.id);
                return false;
            }
        }
        for (EpisodeNode s : n.successors.values()) {
            if (!s.predecessors.containsValue(n)) {
                System.err.println("Successor-Predecessor inconsistency: " + n.id);
                return false;
            }
            if (s.parent.id != n.parent.id) {
                System.err.println("Successor-Parent inconsistency: " + n.id);
                return false;
            }
            if (!s.parent.children.containsValue(s)) {
                System.err.println("Successor-Brother inconsistency: " + n.id);
                return false;
            }
        }

        // The parent must know n as a child; a node with no predecessors
        // (successors) must be registered as a first (last) child.
        if (n.parent != null) {
            if (!n.parent.children.containsValue(n)) {
                System.err.println("Parent-Child inconsistency: " + n.id);
                return false;
            }
            if (n.predecessors.isEmpty()) {
                if (!n.parent.firstChild.containsValue(n)) {
                    System.err.println("Parent-First child inconsistency: " + n.id);
                    return false;
                }
            }
            if (n.successors.isEmpty()) {
                if (!n.parent.lastChild.containsValue(n)) {
                    System.err.println("Parent-Last child inconsistency: " + n.id);
                    return false;
                }
            }
        }

        // firstChild and lastChild must cover the same episode ids, and the
        // chain endpoints must agree with the single- vs multi-node case.
        for (Integer i : n.firstChild.keySet()) {
            if (!n.lastChild.containsKey(i)) {
                System.err.println("First child - last child set inconsistency: " + n.id);
                return false;
            }
            if (n.firstChild.get(i) == n.lastChild.get(i)) {
                // Single-node chain: the sole child may not have a successor.
                if (n.firstChild.get(i).successors.containsKey(i)) {
                    System.err.println("First child single sequence inconsistency: " + n.id);
                    return false;
                }
            }
            if (n.firstChild.get(i) != n.lastChild.get(i)) {
                // Multi-node chain: the first child must have a successor.
                if (!n.firstChild.get(i).successors.containsKey(i)
                        || n.firstChild.get(i).successors.get(i) == null) {
                    System.err.println("First child multi sequence inconsistency: " + n.id);
                    return false;
                }
            }
        }
        for (Integer i : n.lastChild.keySet()) {
            if (!n.firstChild.containsKey(i)) {
                System.err.println("Last child - first child set inconsistency: " + n.id);
                return false;
            }
            if (n.firstChild.get(i) == n.lastChild.get(i)) {
                // Single-node chain: the sole child may not have a predecessor.
                if (n.lastChild.get(i).predecessors.containsKey(i)) {
                    System.err.println("Last child single sequence inconsistency: " + n.id);
                    return false;
                }
            }
            if (n.firstChild.get(i) != n.lastChild.get(i)) {
                // Multi-node chain: the last child must have a predecessor.
                if (!n.lastChild.get(i).predecessors.containsKey(i)
                        || n.lastChild.get(i).predecessors.get(i) == null) {
                    System.err.println("Last child multi sequence inconsistency: " + n.id);
                    return false;
                }
            }
        }

        return true;
    }

    /**
     * Estimates whether this node could be re-derived from the learned schemas
     * alone. The whole episode is copied, the copy of this node is deleted,
     * the schema nodes and slot contents of everything that remains are
     * collected as cues, and {@link #deriveChildEpisodicNode} is asked to
     * propose a new child for the copied parent. Returns 1 when the proposed
     * node carries this node's name (derivation recreates it), 0 otherwise.
     *
     * @return 1.0 if the node is derivable from schemas, 0.0 otherwise
     */
    private double nodeDerivability() {
        validateNode(this);

        // We don't want to modify this episode, let's make a safe copy.
        Chronobag chron = new Chronobag(root.idGen, root.mem.chrono);
        Episode testEpisode = root.createCopy(chron, true);

        // Record the name path from the episode root down to this node.
        ArrayList<String> list = new ArrayList<String>();
        EpisodeNode n = parent;
        list.add(name);
        while (n != null) {
            list.add(0, n.getName());
            n = n.parent;
        }
        // Descend the copied episode along the same name path to reach the
        // copy of this node.
        n = testEpisode.getRoot();
        list.remove(0); //removes the root name - we never derive root.
        Iterator<String> it = list.iterator();
        while (it.hasNext()) {
            n = n.getChild(it.next());
        }
        // NOTE: Need to use "thorough" delete here to permanently kill even
        // the object nodes. Similar to createCopy, not doing so would leak
        // memory at extreme pace.

        EpisodeNode immediateParent = n.parent;
        n.deleteNode(false, true);

        // Now episode2 = episode1 without this node. Let's see if we can get it
        // back just using the schema.

        // Collect all nodes that currently contribute to this episode.
        Collection<SchemaEpisodeNode> schemaENodes = new ArrayList<SchemaEpisodeNode>();
        Collection<SlotContent> schemaContents = new ArrayList<SlotContent>();
        // Walk through the episode to collect info
        Stack<EpisodeNode> stack = new Stack<EpisodeNode>();
        SchemaSlot sSlot;
        EpisodeNode stackNode;
        stack.add(immediateParent.root.root);
        while (!stack.isEmpty()) {
            stackNode = stack.pop();
            // Nodes with no decision-tree equivalent contribute nothing.
            if (stackNode.associatedNode == null) {
                continue;
            }
            if (!stackNode.equals(this)) {
                // We don't want this node contributing to the list of nodes.
                schemaENodes.add(stackNode.associatedNode.getAssociatedNode());
            }
            // Collect slot contents of every typed (non-"Other") object slot.
            for (ObjectSlot slot : stackNode.getObjectSlots()) {
                if (slot.getType().equals("Other")) {
                    continue;
                }
                sSlot = slot.getParentNode().associatedNode.getAssociatedNode().getSlot(slot.getType());
                for (ObjectNode obj : slot.getUsedObjects()) {
                    schemaContents.add(sSlot.getSlotContent(obj.getName()));
                }
            }
            stack.addAll(stackNode.getChildrenNodes());
        }

        // Ask the schemas for the most likely missing child; success means
        // the derived node has the same name as this one.
        EpisodeNode wouldAdd = immediateParent.deriveChildEpisodicNode(0, 0, schemaENodes, schemaContents);
        if ((wouldAdd != null) && (wouldAdd.name.equals(this.name))) {
            // Is it the same node?
            return 1;
        } else {
            return 0;
        }
    }

    /**
     * Re-binds this node and every node in its subtree to a new owning
     * <code>Episode</code> by a depth-first descent.
     *
     * @param newRoot the <code>Episode</code> that should own this subtree
     */
    private void setRoot(Episode newRoot) {
        this.root = newRoot;
        Iterator<EpisodeNode> childIt = children.values().iterator();
        while (childIt.hasNext()) {
            childIt.next().setRoot(newRoot);
        }
    }

    /**
     * Merges current node with other node. Both nodes must represent same action/goal.
     * New node will be the union of this node and node in parameter.
     * <strong>Node in parameter (other node) will be destroyed in the process.</strong>
     * <p>
     * Children unique to {@code other} are re-parented under this node;
     * children present under both are merged recursively. The per-episode
     * sibling chains ({@code firstChild}/{@code lastChild}/{@code predecessors}/
     * {@code successors}, keyed by episode id) are re-linked so transferred
     * children join this node's chains, {@code other}'s object slots are
     * emptied into this node's slots, {@code other} is fully deleted, and this
     * node's sub-node counters are recomputed.
     *
     * @param other Node to be merged to current one. (will be destroyed during process)
     * @return A reference to the merged node = this, once other node is merged into this one.
     */
    public EpisodeNode mergeWith(EpisodeNode other) {
        assert (this.getName().equals(other.getName()));
        //assert (validateNode(this));
        //assert (validateNode(other));

        // Flag 'other' as absorbed; it is fully deleted at the end of the merge.
        other.consumed = true;
        // Children of 'other' with no namesake under this node - these will be
        // physically re-parented; namesakes are merged recursively later.
        Collection<EpisodeNode> copiedChildren = new HashSet<EpisodeNode>();
        for (EpisodeNode otherChild : other.getChildrenNodes()) {
            if (!children.containsKey(otherChild.getName())) {
                //children do not contain otherChild
                copiedChildren.add(otherChild);
                //actual transfer will be performed once all brothers are merged,
                // otherwise siblings will be referencing partially transfered (without
                // updated references) nodes at the beginning of merge process
            }
        }

        // Walk other's per-episode child sequences and splice each child into
        // this node's chain for the same episode id.
        for (Map.Entry<Integer, EpisodeNode> entry: other.firstChild.entrySet()) {
            int key = entry.getKey();
            // Children already present in this node's chain for this episode id.
            Collection<EpisodeNode> sequenced = new HashSet<EpisodeNode>();
            EpisodeNode n = firstChild.get(key);
            while (n != null) {
                sequenced.add(n);
                n = n.getSuccessor().get(key);
            }

            n = entry.getValue();
            EpisodeNode next = null;
            other.lastChild.put(key, null);
            while (n != null) {
                next = n.getSuccessor().get(key);
                if (copiedChildren.contains(n)) {
                    //actual transfer of other child
                    children.put(n.getName(), n);
                    other.children.remove(n.getName());
                    n.parent = this;
                    n.setRoot(root);

                    //child was transferred under this node, cannot be referenced from other node
                    if (entry.getValue() == n) {
                        entry.setValue(next);
                    }
                    // Unlink n from other's predecessor/successor chain for
                    // this episode id, bridging its former neighbours.
                    if (n.predecessors.get(key) != null) {
                        assert (n.predecessors.get(key).successors.get(key) == n);
                        if (n.successors.get(key) != null) {
                            n.predecessors.get(key).successors.put(key, n.successors.get(key));
                        } else {
                            n.predecessors.get(key).successors.remove(key);
                        }
                        //n.predecessors.put(key, children.get(n.predecessors.get(key).getName()));
                    }
                    if (n.successors.get(key) != null) {
                        assert (n.successors.get(key).predecessors.get(key) == n);
                        if (n.predecessors.get(key) != null) {
                            n.successors.get(key).predecessors.put(key, n.predecessors.get(key));
                        } else {
                            n.successors.get(key).predecessors.remove(key);
                        }
                        //n.successors.put(key, children.get(n.successors.get(key).getName()));
                    }
                } else {
                    assert (n.parent == other);
                    //child was not transfered, still is under other node
                    other.lastChild.put(key, n);

                }
                // Continue with this node's child of the same name: either the
                // just-transferred node itself, or the pre-existing namesake.
                n = children.get(n.getName());
                if (!sequenced.contains(n)) {
                    // Append n at the tail of this node's chain for this episode id.
                    if (lastChild.get(key) != null) {
                        assert (lastChild.get(key).successors.get(key) == null);
                        n.predecessors.put(key, lastChild.get(key));
                        lastChild.get(key).successors.put(key, n);
                    }
                    if (!firstChild.containsKey(key)) {
                        assert (!lastChild.containsKey(key));
                        firstChild.put(key, n);
                        n.predecessors.remove(key);
                    }
                    n.successors.remove(key);
                    lastChild.put(key, n);
                }
                sequenced.add(n);
                n = next;
                if (n != null) next = n.getSuccessor().get(key);
            }
        }

        //TODO? maybe children can be sorted according timestamps -> now new children are just added at the end

        //in method delete node, child can be merged into sibling
        //in this case we want to make sure parent points to the sibling node, not the consumed one
     /*   if (other.parent != null) {
            for (Map.Entry<Integer, EpisodeNode> entry: other.parent.firstChild.entrySet()) {
                if (entry.getValue() == other) {
                    entry.setValue(other.parent.getChild(other.getName()));
                }
            }
            for (Map.Entry<Integer, EpisodeNode> entry: other.parent.lastChild.entrySet()) {
                if (entry.getValue() == other) {
                    entry.setValue(other.parent.getChild(other.getName()));
                }
            }
        }*/

        //first/last child null references should be deleted (no entry episID->null) should exist
        Iterator<Entry<Integer, EpisodeNode>> i = other.firstChild.entrySet().iterator();
        while (i.hasNext()) {
            if (i.next().getValue() == null) i.remove();
        }
        i = other.lastChild.entrySet().iterator();
        while (i.hasNext()) {
            if (i.next().getValue() == null) i.remove();
        }

        // Pour other's object slots into this node's slots of the same type,
        // creating missing slots as needed; other's slots are emptied.
        ObjectSlot s = null;
        for (Iterator<Map.Entry<String, ObjectSlot>> iter = other.slots.entrySet().iterator(); iter.hasNext();) {
            s = iter.next().getValue();
            if (!slots.containsKey(s.getType())) {
                addSlot(s.getType());
            }
            ObjectSlot slot = slots.get(s.getType());
            for (ObjectNode o : s.getUsedObjects()) {
                slot.addObject(o, true);
            }
            s.emptySlot();
            iter.remove();
        }
        // Children that exist under both nodes are merged recursively.
        // (Snapshot the collection: mergeWith mutates other's children.)
        Collection<EpisodeNode> otherChildren = new HashSet<EpisodeNode>();
        otherChildren.addAll(other.getChildrenNodes());
        for (EpisodeNode otherChild : otherChildren) {
            if (children.containsKey(otherChild.getName())) {
                // Merge with otherChildren
                children.get(otherChild.getName()).mergeWith(otherChild);
            }
        }

        //other.removeSequenceReferences(false);
        other.fullDelete(true);

        // Recompute the cached subtree counters from the merged children.
        numberOfSubNodes = 0;
        numberOfSubNodesWithObjects = 0;
        for (EpisodeNode child : children.values()) {
            numberOfSubNodes += child.numberOfSubNodes + 1;
            numberOfSubNodesWithObjects += child.numberOfSubNodesWithObjects + 1;
        }
        for (ObjectSlot slot : slots.values()) {
            numberOfSubNodesWithObjects += slot.getUsedObjects().size();
        }

        //assert (validateNode(this));
        return this;
    }

    /**
     * Derives a missing intermediate level of the episode hierarchy: inserts
     * the decision-tree parent of this node's associated node between this
     * node and its current episodic parent. If an equivalent intermediate node
     * already exists (created earlier for a sibling), this node is detached,
     * hung under a temporary parent, and that temporary parent is merged into
     * the existing node. Sibling chains and first/last child maps of the old
     * parent are re-linked accordingly, and subtree counters are updated.
     *
     * @return the (newly created or pre-existing) intermediate parent node
     */
    public EpisodeNode deriveNodeTrace() {
        //has to be performed for all sibling nodes otherwise the episode may be corrupted
        // Debug dump before the assertion below fires.
        if (associatedNode.parent == parent.associatedNode) {
            System.out.println("-1 assoc " + associatedNode.getName());
            System.out.println("-2 this  " + this.getName());
            System.out.println("-3 par   " + parent.getName());
            System.out.println("-4 a.par " + associatedNode.parent.getName());
            System.out.println("-5 par.a " + parent.associatedNode.getName());
        }
        assert(associatedNode.parent != parent.associatedNode);
        assert(validateNode(this));
        assert(validateNode(parent));

        // True when a brand-new intermediate node was created (vs. merged).
        boolean added;

        // Debug dump: associatedNode.parent is dereferenced just below.
        if (associatedNode.parent == null) {
            System.out.println("-1 " + associatedNode.getName());
            System.out.println("-2 " + this.getName());
            System.out.println("-3 " + parent.getName());
        }
        EpisodeNode newNode = getParent().getChild(associatedNode.parent.getName());
        if (newNode == null) {
            //new parent will be created
            newNode = new EpisodeNode(associatedNode.parent.getName(), getParent(), root, associatedNode.parent);
            added = true;

            //integrate new parent with current siblings
            // newNode takes over this node's place in every sibling chain.
            for (Map.Entry<Integer, EpisodeNode> entry : successors.entrySet()) {
                assert (entry.getValue().predecessors.get(entry.getKey()) == this);
                entry.getValue().predecessors.put(entry.getKey(), newNode);
                newNode.successors.put(entry.getKey(), entry.getValue());
            }
            successors.clear();
            for (Map.Entry<Integer, EpisodeNode> entry : predecessors.entrySet()) {
                assert (entry.getValue().successors.get(entry.getKey()) == this);
                entry.getValue().successors.put(entry.getKey(), newNode);
                newNode.predecessors.put(entry.getKey(), entry.getValue());
            }
            predecessors.clear();
            //update first child/last child references for old and new parent
            for (Map.Entry<Integer, EpisodeNode> entry : parent.firstChild.entrySet()) {
                if (entry.getValue() == this) {
                    entry.setValue(newNode);
                }
                newNode.firstChild.put(entry.getKey(), this);
            }
            for (Map.Entry<Integer, EpisodeNode> entry : parent.lastChild.entrySet()) {
                if (entry.getValue() == this) {
                    entry.setValue(newNode);
                }
                newNode.lastChild.put(entry.getKey(), this);
            }

            // Swap this node for newNode in the old parent, then hang this
            // node under newNode.
            parent.children.remove(this.name);
            parent.children.put(newNode.name, newNode);

            this.parent = newNode;
            newNode.children.put(this.name, this);
        } else {
            assert(validateNode(newNode));
            //new parent already exists (was created for some sibling).
            //if necessary following situations can be seen in following scenario:
            //root nodes with three subnodes, middle node has four subnodes. delete middle subnode.
            //then derive it for second of four nodes.
            //following situations describe deriving a parent for three remaining subnodes.

            //will disconnect this node from the tree, create a new parent for it
            //and then merge this new parent with equivalent node already in the episode tree

            EpisodeNode tempParent = new EpisodeNode(associatedNode.parent.getName(), null, root, associatedNode.parent);
            added = false;

            tempParent.children.put(name, this);
            // Re-point the old parent's first/last child entries away from
            // this node; tempParent records this node as its only child.
            Iterator<Entry<Integer, EpisodeNode>> it = parent.firstChild.entrySet().iterator();
            while (it.hasNext()) {
                Entry<Integer, EpisodeNode> entry = it.next();
                if (entry.getValue() == this) {
                    entry.setValue(successors.get(entry.getKey()));
                }
                tempParent.firstChild.put(entry.getKey(), this);
                if (entry.getValue() == null) it.remove();
            }
            it = parent.lastChild.entrySet().iterator();
            while (it.hasNext()) {
                Entry<Integer, EpisodeNode> entry = it.next();
                if (entry.getValue() == this) {
                    entry.setValue(predecessors.get(entry.getKey()));
                }
                tempParent.lastChild.put(entry.getKey(), this);
                if (entry.getValue() == null) it.remove();
            }

            // Bridge this node's former sibling neighbours to each other.
            it = predecessors.entrySet().iterator();
            while (it.hasNext()) {
                Entry<Integer, EpisodeNode> entry = it.next();
                assert (entry.getValue().successors.get(entry.getKey()) == this);
                if (successors.containsKey(entry.getKey())) {
                    entry.getValue().successors.put(entry.getKey(), successors.get(entry.getKey()));
                } else {
                    entry.getValue().successors.remove(entry.getKey());
                }
            }
            it = successors.entrySet().iterator();
            while (it.hasNext()) {
                Entry<Integer, EpisodeNode> entry = it.next();
                assert (entry.getValue().predecessors.get(entry.getKey()) == this);
                if (predecessors.containsKey(entry.getKey())) {
                    entry.getValue().predecessors.put(entry.getKey(), predecessors.get(entry.getKey()));
                } else {
                    entry.getValue().predecessors.remove(entry.getKey());
                }
            }
            predecessors.clear();
            successors.clear();

            parent.children.remove(this.name);
            parent = tempParent;

            assert(validateNode(newNode));
            // Absorb the temporary parent (and this node with it) into the
            // existing intermediate node.
            newNode.mergeWith(tempParent);
            assert(validateNode(newNode));
        }

        // Propagate the subtree counter change up the ancestor chain.
        if (added) {
            EpisodeNode n = parent;
            while (n != null) {
                n.numberOfSubNodes++;
                n.numberOfSubNodesWithObjects++;
                n = n.parent;
            }
            newNode.numberOfSubNodes = numberOfSubNodes + 1;
            newNode.numberOfSubNodesWithObjects = numberOfSubNodesWithObjects + 1;
        } else {
            newNode.numberOfSubNodes += numberOfSubNodes + 1;
            newNode.numberOfSubNodesWithObjects += numberOfSubNodesWithObjects + 1;
        }

        assert(associatedNode.parent.getName().equals(parent.associatedNode.getName()));

        assert(validateNode(newNode));
        assert(validateNode(newNode.getChildrenNodes()));
        assert(validateNode(newNode.parent));
   //     assert(validateNode(this)) : id;
   //     assert(validateNode(parent));
   //     assert(validateNode(parent.parent));
        return newNode;
    }

    /**
     * Attempts to find a child that can be added to this node of the episode,
     * given schema node cues: among the schema children of the associated node
     * that are not yet present in this episode, picks the one whose inclusion
     * yields the highest-scoring schema counter and appends it as the last
     * child of this node.
     * <p>
     * Side effect: {@code null} entries are removed from {@code schemaNodeCues}.
     *
     * @param level passed through to the schema counter selectors
     *              - NOTE(review): exact semantics defined in SchemaBag, confirm there
     * @param index passed through to the schema counter selectors
     * @param schemaNodeCues    schema nodes already contributing to this episode
     * @param schemaContentCues slot contents already contributing to this episode
     * @return the newly added child node, or {@code null} when no candidate qualifies
     */
    public EpisodeNode deriveChildEpisodicNode(int level, int index, Collection<SchemaEpisodeNode> schemaNodeCues, Collection<SlotContent> schemaContentCues) {
        SchemaEpisodeNode newSchemaChild = null;
        Collection<SchemaEpisodeNode> possibleChildren = new HashSet<SchemaEpisodeNode>();
        // Lazily resolve the schema node for this node's name if not yet bound.
        if (associatedNode.getAssociatedNode() == null) {
            SchemaEpisodeNode s = root.getParentChronobag().chrono.mem.schemas.getSchemaENode(name);
            associatedNode.setAssociatedSchemaNode(s);
        }
        schemaNodeCues.remove(null);
        // Candidates = schema children minus those already present as children.
        possibleChildren.addAll(associatedNode.getAssociatedNode().getChildrenSchemaNodes());
        for (EpisodeNode n : children.values()) {
            if (n.associatedNode != null) {
                possibleChildren.remove(n.associatedNode.getAssociatedNode());
            }
        }
/*        Collection<SlotContent> possibleContents = new HashSet<SlotContent>();
        for (SchemaEpisodeNode traceNode : schemaNodeCues) {
            for (SchemaSlot sSlot : traceNode.getSlots()) {
                for (SlotContent slotContent : sSlot.getSlotContents()) {
                    if (!schemaContentCues.contains(slotContent)) {
                        possibleContents.add(slotContent);
                    }
                }
            }
        }*/
        SchemaBag schemas = root.mem.schemas;

        Collection<SchemaCounter> counts;
        // Counts describe all the possible ways the current set of nodes can be extended.
        counts = schemas.schemaCounterSupersetSelector(level, index, schemaNodeCues, schemaContentCues);

        int total, maxTotal = 0;
        Collection<SchemaEpisodeNode> schemaNodeCollection = new HashSet<SchemaEpisodeNode>();
        schemaNodeCollection.addAll(schemaNodeCues);

        // Only intentions (or childless nodes) may gain a derived child.
        if (associatedNode.getType() == NodeType.INTENTION || children.isEmpty()) {
            // Find a most likely child to be included.
            for (SchemaEpisodeNode n : possibleChildren) {
                HashSet<SchemaCounter> countsFiltered = new HashSet<SchemaCounter>(counts);
                ArrayList<SchemaCounter> countsSorted;

                ArrayList<SchemaEpisodeNode> added = new ArrayList<SchemaEpisodeNode>();
                added.add(n);
                // Expected cue-set size once candidate n is included; the
                // selector only needs re-running when n is a genuinely new cue.
                int maxSchema;
                if (schemaNodeCues.contains(n)) {
                    maxSchema = schemaNodeCues.size();
                } else {
                    countsFiltered = schemas.schemaCounterSupersetSelectorOne(level, index, countsFiltered, added, new ArrayList<SlotContent>());
                    maxSchema = schemaNodeCues.size()+1;
                }
                countsFiltered = schemas.schemaCounterSubsetMinsizeFilter(countsFiltered, maxSchema, schemaContentCues.size());
                if (countsFiltered.isEmpty()) {
                    // Empty counts list
                    continue;
                }
                // Keep the candidate with the highest best-counter score.
                countsSorted = schemas.schemaCounterSort(countsFiltered);
                SchemaCounter includedCounter = countsSorted.get(0);

                total = includedCounter.getCount();

                if (total > maxTotal) {
                    newSchemaChild = n;
                    maxTotal = total;
                }
            }
            if (newSchemaChild != null) {
                schemaNodeCollection.add(newSchemaChild);
            }
        }
        if (newSchemaChild == null) {
            // No new schema child.
            return null;
        }

        /* FIXME: No object deriving.
        Collection<SlotContent> slotContentsCollecion = new HashSet<SlotContent>();
        for (SchemaCounter count : counts) {
            total = count.getCount();
            slotContentsCollecion.clear();
            slotContentsCollecion.addAll(count.objectNodes);
            SlotContent c;
            for (Iterator<SlotContent> iter = possibleContents.iterator(); iter.hasNext();) {
                c = iter.next();
                slotContentsCollecion.add(c);
                if (slotContentsCollecion.size() + count.episodeNodes.size() > root.mem.parameters.MAX_SCHEMA_COMBINATION_COUNT) {
                    continue;
                }
                includedCounter = schemas.getCounter(count.episodeNodes, slotContentsCollecion, root.mem.parameters.MAX_SCHEMA_COMBINATION_COUNT);
                if (includedCounter == null) {
                    continue;
                }
                includedTotal = includedCounter.getCount();
                slotContentsCollecion.remove(c);
                perc = (double)includedTotal / total;
                if (perc > root.mem.parameters.PERCENTAGE_TO_DERIVE_OBJECT) {

                    //derive object
                    ObjectSlot slot = null;
                    EpisodeNode slotParent = this;
                    while (slotParent != null) {
                        if (slotParent.associatedNode.getAssociatedNode().getSlots().contains(c.getSlot())) {
                            break;
                        }
                        slotParent = slotParent.parent;
                    }
                    assert (slotParent != null);
                    if (slotParent == null) {
                        System.err.println("Could not derive object - cannot find parent node.");
                    } else {
                        slot = getObjectSlot(c.getSlot().getType());
                        if (slot == null) {
                            //slot = new ObjectSlot(root.idGen.getNewId(), c.getSlot().getType(), slotParent);
                            slotParent.addSlot(c.getSlot().getType());
                            slot = slotParent.getObjectSlot(c.getSlot().getType());

                            ObjectNode o = root.mem.chrono.getPresentChronobag().objectNodes.get(c.getObject().getName());
                            while (slotParent != null) {
                                if (slotParent.getObjectSlot("Other") != null) {
                                    if (slotParent.getObjectSlot("Other").getUsedObjects().contains(o)) {
                                        slotParent.getObjectSlot("Other").remove(o);
                                    }
                                }
                                slotParent = slotParent.getParent();
                            }
                        }
                        slot.addObject(root.mem.chrono.getPresentChronobag().createObjectNode(c.getObject().getName()), true);
                        iter.remove();
                    }

                }
            }
        }*/

        //This attaches the new node as the last child. So far it is OK because the schemabag does not remember seqeunces anyway.
        addChildNode(newSchemaChild.getName(), -1);

        return children.get(newSchemaChild.getName());
    }

    /*
    // Attempt to find a child that can be added to the episode, given schema node cues.
    public SlotContent deriveChildObjectNode(Collection<SchemaEpisodeNode> traceSchemaNodes, Collection<SlotContent> schemaContents) {
        SchemaEpisodeNode newSchemaChild = null;

        Collection<SlotContent> possibleContents = new HashSet<SlotContent>();
        for (SchemaEpisodeNode traceNode : traceSchemaNodes) {
            for (SchemaSlot sSlot : traceNode.getSlots()) {
                for (SlotContent slotContent : sSlot.getSlotContents()) {
                    if (!schemaContents.contains(slotContent)) {
                        possibleContents.add(slotContent);
                    }
                }
            }
        }
        SchemaBag schemas = root.mem.schemas;

        Collection<SchemaCounter> counts;
        // Counts describe all the possible ways the current set of nodes can be extended.
        counts = schemas.schemaCounterSupersetSelector(traceSchemaNodes, schemaContents);

        int total, maxTotal = 0;
        Collection<SchemaEpisodeNode> schemaNodeCollection = new HashSet<SchemaEpisodeNode>();
        schemaNodeCollection.addAll(traceSchemaNodes);

/*        if (associatedNode.getType() == NodeType.INTENTION || children.isEmpty()) {
            // Find a most likely child to be included.
            for (SchemaEpisodeNode n : possibleChildren) {
                HashSet<SchemaCounter> countsFiltered = new HashSet<SchemaCounter>(counts);
                ArrayList<SchemaCounter> countsSorted;

                ArrayList<SchemaEpisodeNode> added = new ArrayList<SchemaEpisodeNode>();
                added.add(n);
                countsFiltered = schemas.schemaCounterSupersetSelectorOne(countsFiltered, added, new ArrayList<SlotContent>());
                countsFiltered = schemas.schemaCounterSubsetMaxsizeFilter(countsFiltered, traceSchemaNodes.size()+1, schemaContents.size());
                countsFiltered = schemas.schemaCounterSubsetMinsizeFilter(countsFiltered, traceSchemaNodes.size()+1, schemaContents.size());
                countsSorted = schemas.schemaCounterSort(countsSorted);
                SchemaCounter includedCounter = countsSorted.get(0);

                total = includedCounter.getCount();

                if (total > maxTotal) {
                    newSchemaChild = n;
                    maxTotal = total;
                }
            }
            if (newSchemaChild != null) {
                schemaNodeCollection.add(newSchemaChild);
            } else {
                // No new schema child.
                return null;
            }
        }*/
        /*
        Collection<SlotContent> slotContentsCollecion = new HashSet<SlotContent>();
        for (SchemaCounter count : counts) {
            total = count.getCount();
            slotContentsCollecion.clear();
            slotContentsCollecion.addAll(count.objectNodes);
            SlotContent c;
            for (Iterator<SlotContent> iter = possibleContents.iterator(); iter.hasNext();) {
                c = iter.next();
                slotContentsCollecion.add(c);
                if (slotContentsCollecion.size() + count.episodeNodes.size() > root.mem.parameters.MAX_SCHEMA_COMBINATION_COUNT) {
                    continue;
                }
                includedCounter = schemas.getCounter(count.episodeNodes, slotContentsCollecion, root.mem.parameters.MAX_SCHEMA_COMBINATION_COUNT);
                if (includedCounter == null) {
                    continue;
                }
                includedTotal = includedCounter.getCount();
                slotContentsCollecion.remove(c);
                perc = (double)includedTotal / total;
                if (perc > root.mem.parameters.PERCENTAGE_TO_DERIVE_OBJECT) {

                    //derive object
                    ObjectSlot slot = null;
                    EpisodeNode slotParent = this;
                    while (slotParent != null) {
                        if (slotParent.associatedNode.getAssociatedNode().getSlots().contains(c.getSlot())) {
                            break;
                        }
                        slotParent = slotParent.parent;
                    }
                    assert (slotParent != null);
                    if (slotParent == null) {
                        System.err.println("Could not derive object - cannot find parent node.");
                    } else {
                        slot = getObjectSlot(c.getSlot().getType());
                        if (slot == null) {
                            //slot = new ObjectSlot(root.idGen.getNewId(), c.getSlot().getType(), slotParent);
                            slotParent.addSlot(c.getSlot().getType());
                            slot = slotParent.getObjectSlot(c.getSlot().getType());

                            ObjectNode o = root.mem.chrono.getPresentChronobag().objectNodes.get(c.getObject().getName());
                            while (slotParent != null) {
                                if (slotParent.getObjectSlot("Other") != null) {
                                    if (slotParent.getObjectSlot("Other").getUsedObjects().contains(o)) {
                                        slotParent.getObjectSlot("Other").remove(o);
                                    }
                                }
                                slotParent = slotParent.getParent();
                            }
                        }
                        slot.addObject(root.mem.chrono.getPresentChronobag().createObjectNode(c.getObject().getName()), true);
                        iter.remove();
                    }

                }
            }
        }

        //This attaches the new node as the last child. So far it is OK because the schemabag does not remember seqeunces anyway.
        addChildNode(newSchemaChild.getName(), -1);

        return children.get(newSchemaChild.getName());
    }*/

    /**
     * Returns the cached count of nodes in the subtree below this node
     * (this node itself excluded). The value is maintained by
     * {@link #recalculateTreeSize(boolean)}.
     *
     * @return number of descendant episode nodes
     */
    public int getNumberOfSubNodes() {
        return this.numberOfSubNodes;
    }

    /**
     * Returns the cached count of descendant nodes plus the objects held in
     * this subtree's object slots. The value is maintained by
     * {@link #recalculateTreeSize(boolean)}.
     *
     * @return number of descendant episode nodes including slot objects
     */
    public int getNumberOfSubNodesWithObjects() {
        return this.numberOfSubNodesWithObjects;
    }

    /**
     * Returns the start time of this node converted to whole minutes.
     * <p>
     * NOTE(review): assumes {@code timeStart} is expressed in seconds —
     * TODO confirm against the field declaration. If {@code timeStart} is an
     * integral type, the division truncates before {@code Math.floor} is
     * applied, making the floor a no-op; if it is a floating-point type, the
     * floor matters for negative values.
     *
     * @return start time in minutes, rounded down
     */
    public int getStartTime() {
        // Return time in minutes
        return (int)Math.floor(timeStart / 60);
    }

    /**
     * Compares two episode nodes by their start time (in minutes, as
     * reported by {@link #getStartTime()}).
     * <p>
     * Uses {@link Integer#compare(int, int)} instead of subtraction: the
     * subtraction idiom can overflow and violate the {@code compareTo}
     * contract for extreme values.
     * <p>
     * NOTE(review): this ordering returns 0 for distinct nodes that start in
     * the same minute, so it is not consistent with {@code equals} — existing
     * behavior, preserved here; callers relying on it should not use these
     * nodes in a {@code TreeSet}/{@code TreeMap} expecting uniqueness.
     *
     * @param o the node to compare against
     * @return a negative value, zero, or a positive value as this node
     *         starts before, at the same minute as, or after {@code o}
     */
    @Override
    public int compareTo(EpisodeNode o) {
        return Integer.compare(this.getStartTime(), o.getStartTime());
    }

    /**
     * Recomputes {@code numberOfSubNodes} and
     * {@code numberOfSubNodesWithObjects} for this node and, recursively,
     * for every node in its subtree.
     * <p>
     * Both counters start from the number of direct children; the
     * with-objects counter additionally includes every object used by this
     * node's slots. Each child's freshly recalculated totals are then folded
     * in.
     *
     * @param validate when {@code true}, asserts (with {@code -ea}) that the
     *                 recomputed totals match the previously cached values,
     *                 i.e. that the incremental bookkeeping was consistent
     */
    public void recalculateTreeSize(boolean validate) {
        final int cachedSubNodes = numberOfSubNodes;
        final int cachedSubNodesWithObjects = numberOfSubNodesWithObjects;

        int subNodes = children.size();
        int subNodesWithObjects = children.size();

        // Objects attached to this node's slots count toward the
        // with-objects total only.
        for (ObjectSlot slot : slots.values()) {
            subNodesWithObjects += slot.getUsedObjects().size();
        }

        // Recalculate each child first, then fold its totals into ours.
        for (EpisodeNode child : children.values()) {
            child.recalculateTreeSize(validate);
            subNodes += child.numberOfSubNodes;
            subNodesWithObjects += child.numberOfSubNodesWithObjects;
        }

        numberOfSubNodes = subNodes;
        numberOfSubNodesWithObjects = subNodesWithObjects;

        if (validate) {
            assert (cachedSubNodes == numberOfSubNodes);
            assert (cachedSubNodesWithObjects == numberOfSubNodesWithObjects);
        }
    }
}


