#include <algorithm>
#include "TokenCollection.h"
#include "../utils/Collection.h"
#include "BisonParser.h"

using namespace kc_sf;

//------------------------------------------------------------------- //

// Constructs an empty token collection.
TokenCollection::TokenCollection()
{
}

//------------------------------------------------------------------- //

// Constructs a collection holding a single copy of token.
TokenCollection::TokenCollection( const Token &token )
    : tc_tokens( 1, token )
{
}

//------------------------------------------------------------------- //

// Constructs a collection with a copy of the tokens in [begin, end).
// The original default-constructed the member, built a temporary list
// and copy-assigned it; direct member initialization does one copy.
TokenCollection::TokenCollection( const_iterator begin, const_iterator end )
    : tc_tokens( begin, end )
{
}

//------------------------------------------------------------------- //

// Copy constructor.
// Uses the member initializer list so tc_tokens is copy-constructed
// directly instead of default-constructed and then copy-assigned.
TokenCollection::TokenCollection( const TokenCollection &_tokens )
    : tc_tokens( _tokens.tc_tokens )
{
}

//------------------------------------------------------------------- //

// Copy assignment; replaces the contents with a copy of _tokens.
// (The self-assignment guard only skips redundant work — std::list
// assignment is already safe for self-assignment.)
TokenCollection & 
TokenCollection::operator=( const TokenCollection &_tokens )
{
    if( this != &_tokens )
        tc_tokens = _tokens.tc_tokens;

    return *this;
}

//------------------------------------------------------------------- //

// Prepends a copy of token to the collection.
void TokenCollection::push_front( const Token &token )
{
    // insert-before-begin is equivalent to std::list::push_front
    tc_tokens.insert( tc_tokens.begin(), token );
}

//------------------------------------------------------------------- //

// Appends a copy of token to the collection.
void TokenCollection::push_back( const Token &token )
{
    // insert-before-end is equivalent to std::list::push_back
    tc_tokens.insert( tc_tokens.end(), token );
}

//------------------------------------------------------------------- //

// Inserts a copy of token immediately before position.
void TokenCollection::add( TokenCollection::iterator position, 
                           const Token &token )
{
    tc_tokens.insert( position, token );
}

//------------------------------------------------------------------- //

// Inserts a copy of every token of _tokens immediately before position.
// NOTE(review): _tokens must not be *this — std::list::insert with a
// source range from the container being inserted into is undefined
// behavior.
void TokenCollection::concat( TokenCollection::iterator position, 
                              const TokenCollection &_tokens )
{
    tc_tokens.insert( position, _tokens.begin(), _tokens.end() );
}

//------------------------------------------------------------------- //

void TokenCollection::concat( const TokenCollection &_tokens )
{
    for( TokenCollection::const_iterator it = _tokens.begin();
         it != _tokens.end(); ++it )
    {
        tc_tokens.push_back( *it );
    }
}

//------------------------------------------------------------------- //

// Removes the first token. The collection must not be empty.
void TokenCollection::pop_front()
{
    tc_tokens.erase( tc_tokens.begin() );
}

//------------------------------------------------------------------- //

void TokenCollection::pop_back()
{
    tc_tokens.pop_back();
}

//------------------------------------------------------------------- //

// Removes the token at position.
// Returns an iterator to the first element after the removed one.
TokenCollection::iterator 
TokenCollection::remove( TokenCollection::iterator position )
{
    return tc_tokens.erase( position );
}

//------------------------------------------------------------------- //

// Removes the tokens in the half-open range [_start, end).
void TokenCollection::remove( TokenCollection::iterator _start, 
                              TokenCollection::iterator end )
{
    tc_tokens.erase( _start, end );
}

//------------------------------------------------------------------- //

// Removes every token after _after (exclusive), keeping _after itself.
// Guards the end() case: incrementing the past-the-end iterator, as
// the original unconditionally did, is undefined behavior.
void TokenCollection::removeAllAfter( TokenCollection::iterator _after )
{
    if( _after == tc_tokens.end() )
        return;             // nothing after the end

    tc_tokens.erase( ++_after, tc_tokens.end() );
}

//------------------------------------------------------------------- //

// Removes all tokens from the collection.
void TokenCollection::clear()
{
    tc_tokens.clear();
}

//------------------------------------------------------------------- //

// Removes comment-or-folding-whitespace tokens:
// '\r', '\n', (comments), ' ', '\t'
//
// Fix: the original erased through an *uninitialized* comment_start
// iterator whenever an RB token appeared without a preceding LB (and
// through an *invalidated* one on a second RB after a closed comment)
// — both undefined behavior. An in_comment flag makes those cases
// well-defined: a stray RB is simply left in place.
void TokenCollection::removeCfws()
{
    bool in_comment = false;                 // between an LB and its RB
    iterator comment_start = tc_tokens.begin(),
             it = tc_tokens.begin();
    
    // fws = CRLF | WSP
    // comment = LB opt_fws ccontent_seq RB opt_cfws
    while( it != tc_tokens.end() )
    {
        switch( it->type() )
        {
            case yy::BisonParser::token::CRLF:
            case yy::BisonParser::token::WSP:
                // Plain folding whitespace is always dropped.
                it = tc_tokens.erase( it );
                break;

            case yy::BisonParser::token::LB:
                // Comment opens here. Nested comments are not tracked
                // — the innermost LB wins, as in the original logic.
                comment_start = it;
                in_comment = true;
                ++ it;
                break;
            
            case yy::BisonParser::token::RB:
                ++ it;
                if( in_comment )
                {
                    // Drop the whole comment, LB..RB inclusive.
                    it = tc_tokens.erase( comment_start, it );
                    in_comment = false;
                }
                break;
            
            default:
                ++ it;
                break;
        }
    }
}

//------------------------------------------------------------------- //

// Removes folding-whitespace tokens: '\r', '\n', ' ', '\t'
void TokenCollection::removeFws()
{
    // fws = CRLF | WSP
    for( iterator it = tc_tokens.begin(); it != tc_tokens.end(); )
    {
        const int type = it->type();

        if( type == yy::BisonParser::token::CRLF
            || type == yy::BisonParser::token::WSP )
        {
            it = tc_tokens.erase( it );
        }
        else
        {
            ++ it;
        }
    }
}

//------------------------------------------------------------------- //

// Removes every token whose type equals token_type.
void TokenCollection::removeType( int token_type )
{
    for( iterator it = tc_tokens.begin(); it != tc_tokens.end(); )
    {
        if( it->type() == token_type )
            it = tc_tokens.erase( it );     // erase returns the successor
        else
            ++ it;
    }
}

//------------------------------------------------------------------- //

// Removes every token whose type appears in token_types.
// The hand-written inner search loop (with its found/break/erase
// bookkeeping) is replaced by std::find; <algorithm> is already
// included at the top of this file.
void TokenCollection::removeTypes( std::list<int> token_types )
{
    iterator it = tc_tokens.begin();
    
    while( it != tc_tokens.end() )
    {
        if( std::find( token_types.begin(), token_types.end(), it->type() )
            != token_types.end() )
        {
            it = tc_tokens.erase( it );
        }
        else
            ++ it;
    }
}

//------------------------------------------------------------------- //

// Replaces the tokens in [_start, end) with the single token to_what.
// std::list::erase invalidates only iterators to the erased elements,
// so 'end' stays valid after the erase and marks the insertion point.
void TokenCollection::change( TokenCollection::iterator _start, 
                              TokenCollection::iterator end, 
                              const Token &to_what )
{
    tc_tokens.erase( _start, end );
    tc_tokens.insert( end, to_what );
}

//------------------------------------------------------------------- //

// Replaces the tokens in [_start, end) with a copy of to_what.
// std::list::erase invalidates only iterators to the erased elements,
// so 'end' stays valid after the erase and marks the insertion point.
// NOTE(review): to_what must not be *this — inserting a range from the
// same container is undefined behavior.
void TokenCollection::change( TokenCollection::iterator _start, 
                              TokenCollection::iterator end, 
                              const TokenCollection &to_what )
{
    tc_tokens.erase( _start, end );
    tc_tokens.insert( end, to_what.begin(), to_what.end() );
}
 
//------------------------------------------------------------------- //

// Returns a reference to the first token.
// throws Collection::PositionError when the collection is empty
Token & TokenCollection::head()
{
    if( !tc_tokens.empty() )
        return tc_tokens.front();

    throw Collection::PositionError( typeid(*this).name() );
}

//------------------------------------------------------------------- //

// Returns a const reference to the first token.
// throws Collection::PositionError when the collection is empty
const Token & TokenCollection::head() const
{
    if( !tc_tokens.empty() )
        return tc_tokens.front();

    throw Collection::PositionError( typeid(*this).name() );
}

//------------------------------------------------------------------- //

// Returns a reference to the last token.
// throws Collection::PositionError when the collection is empty
Token & TokenCollection::tail()
{
    if( !tc_tokens.empty() )
        return tc_tokens.back();

    throw Collection::PositionError( typeid(*this).name() );
}

//------------------------------------------------------------------- //

// Returns a const reference to the last token.
// throws Collection::PositionError when the collection is empty
const Token & TokenCollection::tail() const
{
    if( !tc_tokens.empty() )
        return tc_tokens.back();

    throw Collection::PositionError( typeid(*this).name() );
}

//------------------------------------------------------------------- //

// Returns a mutable iterator to the first token.
TokenCollection::iterator TokenCollection::begin()
{
    return tc_tokens.begin();
}

//------------------------------------------------------------------- //

// Returns a const iterator to the first token.
TokenCollection::const_iterator TokenCollection::begin() const
{
    return tc_tokens.begin();
}

//------------------------------------------------------------------- //

// Returns a mutable past-the-end iterator.
TokenCollection::iterator TokenCollection::end()
{
    return tc_tokens.end();
}

//------------------------------------------------------------------- //

// Returns a const past-the-end iterator.
TokenCollection::const_iterator TokenCollection::end() const
{
    return tc_tokens.end();
}

//------------------------------------------------------------------- //

// Returns a mutable reverse iterator to the last token.
TokenCollection::reverse_iterator TokenCollection::rbegin()
{
    return tc_tokens.rbegin();
}

//------------------------------------------------------------------- //

// Returns a const reverse iterator to the last token.
TokenCollection::const_reverse_iterator TokenCollection::rbegin() const
{
    return tc_tokens.rbegin();
}

//------------------------------------------------------------------- //

// Returns a mutable reverse past-the-end iterator.
TokenCollection::reverse_iterator TokenCollection::rend()
{
    return tc_tokens.rend();
}

//------------------------------------------------------------------- //

// Returns a const reverse past-the-end iterator.
TokenCollection::const_reverse_iterator TokenCollection::rend() const
{
    return tc_tokens.rend();
}
 
//------------------------------------------------------------------- //

// Returns true when the collection holds no tokens.
bool TokenCollection::empty() const
{
    return tc_tokens.empty();
}

//------------------------------------------------------------------- //

// Returns the number of tokens in the collection.
std::size_t TokenCollection::size() const
{
    return tc_tokens.size();
}

//------------------------------------------------------------------- //

// Concatenates the values of the tokens in the half-open range
// [_start, end), skipping any token whose type appears in
// ommit_tokens. Iteration also stops at tc_tokens.end() as a safety
// net against an 'end' beyond this collection.
// The hand-written inner search loop is replaced by std::find
// (<algorithm> is already included at the top of this file).
std::string TokenCollection::toStr( TokenCollection::const_iterator _start, 
                                    TokenCollection::const_iterator end,
                                    std::list<int> ommit_tokens ) const
{
    std::string ret;
    
    for( ; _start != end && _start != tc_tokens.end(); ++ _start )
    {
        // Append only token types the caller did not ask to omit.
        if( std::find( ommit_tokens.begin(), ommit_tokens.end(),
                       _start->type() ) == ommit_tokens.end() )
        {
            ret += _start->value();
        }
    }
    
    return ret;
}

//------------------------------------------------------------------- //

// Concatenates the values of all tokens, skipping any token whose
// type appears in ommit_tokens. Delegates to the ranged overload.
std::string TokenCollection::toStr( std::list<int> ommit_tokens ) const
{
    return toStr( begin(), end(), ommit_tokens );
}
