#include "stdafx.h"
#include "WordChoiceGene.h"
#include "OperationTreeNode.h"
#include "MemberVariableTreeNode.h"
#include "ConstantTreeNode.h"

using CollabRC::Bot::AI::WordChoiceGene;
using CollabRC::Bot::AI::ComputationalTreeNode;
using CollabRC::Bot::AI::Gene;

#include "./mocks.h"

/**
 * @brief Creates a new WordChoiceGene object which computes on the specified set
 * @param set the set of words to monitor
 */
WordChoiceGene::WordChoiceGene(const CollabRC::Bot::WordSet& set):m_algorithm(WordChoiceGene::CreateRandomDefaultAlgorithm())
{
    // Rebind the freshly-built algorithm's member-variable nodes to this instance
    UpdateAlgorithmVisitor binder(*this);
    m_algorithm->Accept(binder);

    // Seed every word from the monitored set with an initial weight of 1
    for (set.Reset(); set.NextWord(); )
    {
        m_wordSet.insert(set.GetCurrentWord(), 1.0f);
    }
}

/**
 * @brief Creates a new WordChoiceGene which uses the same word set and
 * weights as the specified WordChoiceGene object
 * @param copy the WordChoiceGene object from which to copy the word set and weights
 */
WordChoiceGene::WordChoiceGene(const WordChoiceGene& copy):m_wordSet(copy.m_wordSet),m_algorithm(copy.m_algorithm->Clone())
{
    // The cloned algorithm still points at the original gene's members;
    // rebind its member-variable nodes to this copy.
    UpdateAlgorithmVisitor binder(*this);
    m_algorithm->Accept(binder);
}

/**
 * @brief Constructor for internal use only which creates a new WordChoiceGene
 * object using the specified computational algorithm.
 * @param algorithm the computation algorithm to use to compute the result
 * @warning This constructor does NOT clone the algorithm; it takes the pointer directly and
 * will delete it on destruction
 */
WordChoiceGene::WordChoiceGene(ComputationalTreeNode* algorithm):m_algorithm(algorithm)
{
    // Point the adopted algorithm's member-variable nodes at this gene
    UpdateAlgorithmVisitor binder(*this);
    m_algorithm->Accept(binder);
}

/** @brief Destroys this WordChoiceGene object and associated resources */
WordChoiceGene::~WordChoiceGene(void)
{
    // The gene owns its algorithm tree: every constructor either builds,
    // clones, or adopts the pointer, so it is freed exactly once here.
    delete m_algorithm;
}

/**
 * @brief Computes a weighted sum score for the specified Diff.
 * 
 * The weighted sum is dependent upon the words in the word list, the number
 * of occurrences of the word in the diff, and the current word weights
 * in the word list. The result is the summation of the weights
 * times the number of occurrences of each word.
 * @param diff the diff from which the weighted sum should be computed
 * @return the weighted sum computed from the word list and the specified Diff
 */
float WordChoiceGene::ComputeWeightedSum(const CollabRC::Bot::Diff& diff) const
{
    const WordSet& set = diff.GetAddedWords();
    set.Reset();
    float total = 0.0f;
    while(set.NextWord())
    {
        const QString& word = set.GetCurrentWord();
        // Single hash lookup (was contains() followed by value(), i.e. two lookups)
        QHash<QString, float>::const_iterator entry = m_wordSet.constFind(word);
        if (entry != m_wordSet.constEnd())
        {
            // Add the weight times the number of occurrences
            total += set.GetCurrentWordCount() * entry.value();
        }
    }

    return total;
}

/**
 * @brief Computes the word presence value for this Diff object.
 * 
 * The word presence is a value on the interval [0, 1] indicating
 * the proportion of words in the word list that are matched by this
 * Diff's added words.
 *
 * If the word set contains no words then the returned value will always 
 * be zero, regardless of the contents of the Diff.
 * @param diff the changeset which should be analyzed
 * @return a value on the interval [0, 1] indicating the proportion of words
 * in the word list that are matched by this Diff object
 */
float WordChoiceGene::ComputeWordPresence(const CollabRC::Bot::Diff& diff) const
{
    // Cache the size once and reuse it for the final division
    // (original queried the container twice, via count() and then size()).
    const int size = m_wordSet.size();
    if (size == 0)      // No words in the set
        return 0.0f;

    const WordSet& set = diff.GetAddedWords();
    // Try each added word to see if it's in our list
    set.Reset();
    int matched = 0;
    while (set.NextWord())
    {
        if (m_wordSet.contains(set.GetCurrentWord()))
            ++matched;
    }

    return static_cast<float>(matched) / static_cast<float>(size);
}

/**
 * @brief Scores the specified diff using this gene's computational algorithm.
 *
 * The weighted sum and word presence are first stored into member variables
 * so the algorithm tree's MemberVariableTreeNodes can read them; the order
 * matters — both must be written before Compute() walks the tree.
 * NOTE(review): m_weightedSum/m_wordPresence are written from a const method,
 * so they are presumably declared mutable in the header — confirm.
 * @param diff the diff to score
 * @return the value computed by the algorithm tree
 */
float WordChoiceGene::Test(const CollabRC::Bot::Diff &diff) const
{
    m_weightedSum = ComputeWeightedSum(diff);
    m_wordPresence = ComputeWordPresence(diff);
    return m_algorithm->Compute();
}

/**
 * @brief Crosses this gene with another gene to produce a new offspring gene.
 *
 * If the other gene is also a WordChoiceGene, the offspring gets an algorithm
 * built by crossing one random branch from each parent's tree, plus a word
 * list assembled by randomly merging the two parents' word lists. If the
 * other gene is some other type, a mutation of one randomly-chosen parent is
 * returned instead.
 * @param gene the gene to cross with this one
 * @return a newly-allocated offspring gene; caller takes ownership
 */
Gene* WordChoiceGene::Cross(const Gene &gene) const
{
    // If they're both WordChoiceGenes then cross the algorithms and lists.
    // NOTE(review): the reference dynamic_cast + bad_cast is used as the type
    // test; a pointer cast with a null check would avoid the exception cost.
    try 
    {
        const WordChoiceGene &spouse = dynamic_cast<const WordChoiceGene&>(gene);
        
        // Build the new algorithm: pick one random branch from each parent
        int lhsDepth = m_algorithm->GetBranchDepth();
        const ComputationalTreeNode& lhs = m_algorithm->GetRandomBranch(lhsDepth);
        int rhsDepth = spouse.m_algorithm->GetBranchDepth();
        const ComputationalTreeNode& rhs = spouse.m_algorithm->GetRandomBranch(rhsDepth);

        // Build the new word list. The offspring adopts (owns) the crossed
        // algorithm pointer via the internal constructor.
        WordChoiceGene *cross = new WordChoiceGene(OperationTreeNode::Cross(lhs, rhs));
        QHash<QString, float>::const_iterator leftEnd = m_wordSet.end();
        QHash<QString, float>::const_iterator rightEnd = spouse.m_wordSet.end();
        // Walk both parents' word lists in parallel; at each step randomly
        // keep entries from the left, right, both, or neither. If a word
        // occurs in both parents, a later insert overwrites the earlier
        // weight (QHash::insert replaces on duplicate key).
        for (QHash<QString, float>::const_iterator left = m_wordSet.begin(), right = spouse.m_wordSet.begin();
             left != leftEnd || right != rightEnd;)
        {
            if (left == leftEnd)
            {
                // Only the right side remains. Take it or nothing.
                if (randint(0, 1))
                {
                    // Take it
                    cross->m_wordSet.insert(right.key(), right.value());
                }
                ++right;
            }
            else if (right == rightEnd)
            {
                // Only the left side remains. Take it or nothing.
                if (randint(0, 1))
                {
                    // Take it
                    cross->m_wordSet.insert(left.key(), left.value());
                }
                ++left;
            }
            else
            {
                // Both sides remain. Randomize.
                switch (randint(0, 3))
                {
                case 0:                     // Take both
                    cross->m_wordSet.insert(left.key(), left.value());
                    cross->m_wordSet.insert(right.key(), right.value());
                    break;
                case 1:                     // Take left only
                    cross->m_wordSet.insert(left.key(), left.value());
                    break;
                case 2:                     // Take right only
                    cross->m_wordSet.insert(right.key(), right.value());
                    break;
                // Default (3): take nothing
                };
                ++left;
                ++right;
            }
        }

        return cross;
    }
    catch (const std::bad_cast&)
    {
        // Not the same gene, so pick one and mutate it
        if (randint(0, 1))
        {
            // Pick this
            return Mutate();
        }
        else
        {
            // Pick other
            return gene.Mutate();
        }
    }
}

/**
 * @brief Produces a mutated copy of this gene.
 *
 * Mutation drops a random subset of the current word list and replaces the
 * dropped entries with words drawn (and consumed) from the shared
 * recentWords pool. If the pool is empty, mutation is impossible and a
 * plain copy is returned.
 * @return a newly-allocated gene; caller takes ownership
 */
Gene* WordChoiceGene::Mutate() const
{
    // We must have at least 1 word in the recent words list before we do this
    // (This is to avoid having a word list with 0 words in it)
    recentWordsLock.lockForRead();
    bool empty = recentWords.empty();
    recentWordsLock.unlock();
    // BUG FIX: the condition was inverted (!empty) — it returned an unmutated
    // copy whenever mutation WAS possible, and otherwise went on to draw from
    // an empty recent-words set.
    if (empty)      // Cannot be done
        return new WordChoiceGene(*this);

    // Assume a = the number of words presently in the word list
    // Assume b = the number of words presently in the "recent" list
    // 1. Pick out a number of operations, n, on the interval [0,a]
    // 2. Choose the drop probability, p, of value n/a
    // 3. Iterate over each element in the word list. Drop the word with probability p
    // 4. Add n random elements to the word list

    WordChoiceGene *newGene = new WordChoiceGene(*this);

    // Step 1: Pick a number of operations
    const int listSize = newGene->m_wordSet.size();
    int n = randint(0, listSize);

    // Step 2: Determine the drop probability.
    // BUG FIX: the original used integer division (n / size), which yields
    // p == 0 for every n < size; it also risked division by zero.
    float p = (listSize > 0)
        ? static_cast<float>(n) / static_cast<float>(listSize)
        : 0.0f;

    // Step 3: Drop a set of words
    QHash<QString, float>::iterator iter = newGene->m_wordSet.begin();
    while (iter != newGene->m_wordSet.end())
    {
        // Should we drop the word?
        if (frand() < p)
            iter = newGene->m_wordSet.erase(iter);
        else 
            ++iter;
    }

    // Step 4: Add up to n elements from the recent words list.
    // BUG FIX: we erase from recentWords, so this needs a WRITE lock (the
    // original took a read lock). The pool size is also re-read every
    // iteration (the original cached it, going stale after each erase), the
    // random index is kept in range [0, count-1] (randint is inclusive
    // elsewhere in this file, e.g. randint(0, 3) reaching case 3), and the
    // loop stops early if the pool runs dry.
    recentWordsLock.lockForWrite();
    for (int i = 0; i < n && !recentWords.empty(); ++i)
    {
        // Pick an element
        int x = randint(0, recentWords.count() - 1);
        // Walk to that element and consume it
        QSet<QString>::iterator pick = recentWords.begin();
        while (x--)
            ++pick;

        newGene->m_wordSet.insert(*pick, 1.0f);
        recentWords.erase(pick);
    }
    recentWordsLock.unlock();

    return newGene;
}

/** Shared pool of recently-seen words that Mutate() draws replacement entries from */
QSet<QString> WordChoiceGene::recentWords;
/** Guards all access to recentWords across genes/threads */
QReadWriteLock WordChoiceGene::recentWordsLock;

/**
 * @brief Creates a visitor that rebinds an algorithm tree to the given gene
 * @param object the gene whose members the visited nodes should read from
 */
WordChoiceGene::UpdateAlgorithmVisitor::UpdateAlgorithmVisitor(CollabRC::Bot::AI::WordChoiceGene &object):m_object(object)
{
}

/**
 * @brief Visits a member-variable node and retargets it at the bound gene
 * @param visitor the member-variable node whose source object is updated
 */
void WordChoiceGene::UpdateAlgorithmVisitor::Visit(CollabRC::Bot::AI::MemberVariableTreeNode<WordChoiceGene, float>& visitor)
{
    // Update the algorithm's object to this object
    visitor.SetObject(m_object);
}

/**
 * @brief Creates the default computational algorithm for a new gene.
 * 
 * The current implementation simply crosses the two member-variable nodes
 * (weighted sum and word presence) via OperationTreeNode::Cross. Despite
 * its name, the live code contains no randomness; the commented-out
 * section below is an older construction (random constants joined by
 * random operations) kept for reference. NOTE(review): if that dead code
 * is ever revived, fix "delete branch1, branch2;" — the comma operator
 * means only branch1 is actually deleted.
 */
ComputationalTreeNode* WordChoiceGene::CreateRandomDefaultAlgorithm()
{
    // Stack-local nodes; Cross presumably clones its inputs into the
    // returned tree (the pattern matches Cross() above) — confirm.
    MemberVariableTreeNode<WordChoiceGene, float> weightedSum(*this, &WordChoiceGene::m_weightedSum);
    MemberVariableTreeNode<WordChoiceGene, float> wordPresence(*this, &WordChoiceGene::m_wordPresence);

    return OperationTreeNode::Cross(weightedSum, wordPresence);
/*  ConstantTreeNode c1(lrand(LN100));
    ConstantTreeNode c2(lrand(LN100));

    // Join each branch
    ComputationalTreeNode *branch1, *branch2;
    if (randint(0, 1))
        branch1 = OperationTreeNode::GetRandomFactory()->Instantiate(weightedSum, c1);
    else
        branch1 = OperationTreeNode::GetRandomFactory()->Instantiate(c1, weightedSum);

    if (randint(0, 1))
        branch2 = OperationTreeNode::GetRandomFactory()->Instantiate(wordPresence, c2);
    else
        branch2 = OperationTreeNode::GetRandomFactory()->Instantiate(c2, wordPresence);

    // Join the branches together
    ComputationalTreeNode *result;
    if (randint(0, 1))
        result = OperationTreeNode::GetRandomFactory()->Instantiate(*branch1, *branch2);
    else
        result = OperationTreeNode::GetRandomFactory()->Instantiate(*branch2, *branch1);

    delete branch1, branch2;
    return result; */
}



