#include <sdk.h>  // Code::Blocks SDK precompiled header; must come first
#include <cctype>
#include <vector>
#include <wx/utils.h>
#include <wx/file.h>
#include <wx/msgdlg.h>
#include "globals.h"
#include "logmanager.h"
#include "manager.h"
#include "cclogger.h"
#include "expression.h"
#include "tokenizer.h"
#include "tokentree.h"

// Presumably enables verbose tokenizer tracing when set to 1 — the flag is
// not referenced anywhere in this chunk; confirm its effect before relying on it.
#define CC_TOKENIZER_DEBUG_OUTPUT 0

// String constants the tokenizer compares against frequently, constructed
// once here to avoid repeated wxString construction in hot paths.
namespace TokenizerConsts {
  const wxString colon( _T( ":" ) );
  const wxString colon_colon( _T( "::" ) );
  const wxString equal( _T( "=" ) );
  // Preprocessor directive keywords (without the leading '#').
  const wxString kw_if( _T( "if" ) );
  const wxString kw_ifdef( _T( "ifdef" ) );
  const wxString kw_ifndef( _T( "ifndef" ) );
  const wxString kw_elif( _T( "elif" ) );
  const wxString kw_elifdef( _T( "elifdef" ) );
  const wxString kw_elifndef( _T( "elifndef" ) );
  const wxString kw_else( _T( "else" ) );
  const wxString kw_endif( _T( "endif" ) );
  const wxString hash( _T( "#" ) );
  const wxString tabcrlf( _T( "\t\n\r" ) );
  const wxString kw_define( _T( "define" ) );
  const wxString kw_undef( _T( "undef" ) );
}
// Upper bound on the number of simultaneously recorded macro expansions in
// m_ExpandedMacros; guards against runaway replacement (see ReplaceBufferText).
static const size_t s_MaxMacroReplaceDepth = 5;

// Construct a tokenizer bound to the given token tree. When a filename is
// supplied, the file is read and the scanner is initialised immediately
// (m_IsOK then reflects whether Init() succeeded).
Tokenizer::Tokenizer( TokenTree* tokenTree, const wxString& filename ) :
  m_TokenTree( tokenTree ),
  m_Filename( filename ),
  m_BufferLen( 0 ),
  m_TokenIndex( 0 ),
  m_LineNumber( 1 ),
  m_NestLevel( 0 ),
  m_UndoTokenIndex( 0 ),
  m_UndoLineNumber( 1 ),
  m_UndoNestLevel( 0 ),
  m_PeekAvailable( false ),
  m_PeekTokenIndex( 0 ),
  m_PeekLineNumber( 0 ),
  m_PeekNestLevel( 0 ),
  m_SavedTokenIndex( 0 ),
  m_SavedLineNumber( 1 ),
  m_SavedNestingLevel( 0 ),
  m_IsOK( false ),
  m_State( tsNormal ),
  m_Loader( 0 ),
  m_NextTokenDoc(),
  m_LastTokenIdx( -1 ),
  m_ReadingMacroDefinition( false ) {
  // Preprocessor evaluation and documentation-comment collection default to on.
  m_TokenizerOptions.wantPreprocessor = true;
  m_TokenizerOptions.storeDocumentation = true;
  if( !m_Filename.IsEmpty() ) {
    Init( m_Filename );
  }
}

// Nothing to release explicitly: the token tree and the loader are owned elsewhere.
Tokenizer::~Tokenizer() {
}

// Prepare the tokenizer for the given file. A non-empty 'filename' replaces
// any previously stored one; the file must exist and yield a non-empty
// buffer. Returns true and sets m_IsOK on success.
bool Tokenizer::Init( const wxString& filename, LoaderBase* loader ) {
  m_Loader = loader;
  BaseInit();

  // Decide which filename to use: the argument wins, otherwise fall back to
  // whatever was stored before (e.g. by the constructor).
  if( !filename.IsEmpty() ) {
    m_Filename = filename;
  } else if( m_Filename.IsEmpty() ) {
    return false;
  }

  // Bail out unless the file exists and reading it produced content.
  if( !wxFileExists( m_Filename ) || !ReadFile() || !m_BufferLen ) {
    return false;
  }

  // Normalise path separators before registering the file in the token tree.
  while( m_Filename.Replace( _T( "\\" ), _T( "/" ) ) ) ;
  m_FileIdx = m_TokenTree->GetFileIndex( m_Filename );

  m_IsOK = true;
  return true;
}

// Prepare the tokenizer for an in-memory buffer instead of a file. A
// sentinel space is appended so look-ahead never runs past the end;
// m_BufferLen counts the original text only.
bool Tokenizer::InitFromBuffer( const wxString& buffer, const wxString& fileOfBuffer, size_t initLineNumber ) {
  BaseInit();
  m_Buffer = buffer + _T( " " );
  m_BufferLen  = buffer.Length();
  m_LineNumber = initLineNumber;
  // Normalise the associated path and register it with the token tree.
  m_Filename = fileOfBuffer;
  while( m_Filename.Replace( _T( "\\" ), _T( "/" ) ) ) ;
  m_FileIdx = m_TokenTree->GetFileIndex( m_Filename );
  m_IsOK = true;
  return true;
}

// Reset every piece of scanning state to its pristine value.
void Tokenizer::BaseInit() {
  // Buffer contents.
  m_Buffer.Clear();
  m_BufferLen = 0;
  // Current scan position.
  m_TokenIndex = 0;
  m_LineNumber = 1;
  m_NestLevel = 0;
  // Undo position (see UngetToken).
  m_UndoTokenIndex = 0;
  m_UndoLineNumber = 1;
  m_UndoNestLevel = 0;
  // Peek position (see PeekToken).
  m_PeekTokenIndex = 0;
  m_PeekLineNumber = 0;
  m_PeekNestLevel = 0;
  // Saved position (see HandleConditionPreprocessor / ReplaceBufferText).
  m_SavedTokenIndex = 0;
  m_SavedLineNumber = 1;
  m_SavedNestingLevel = 0;
  // Documentation collection.
  m_NextTokenDoc.clear();
  m_LastTokenIdx = -1;
  m_IsOK = false;
}

// Fill m_Buffer with the file contents — via the loader when one was given
// (trying UTF-8 first, falling back to Latin-1), otherwise directly from
// disk — and append one sentinel space so look-ahead never reads past the
// end. m_BufferLen excludes the sentinel. Returns true on success.
// (The previous version also fetched the loader's file name into a local
// that was never used; that dead local has been removed.)
bool Tokenizer::ReadFile() {
  bool success = false;
  if( m_Loader ) {
    char* data  = m_Loader->GetData();
    m_BufferLen = m_Loader->GetLength();
    // Try UTF-8 first; an empty result signals a conversion failure, so
    // retry with ISO-8859-1, which accepts any byte sequence.
    m_Buffer = wxString( data, wxConvUTF8, m_BufferLen + 1 );
    if( m_Buffer.Length() == 0 ) {
      m_Buffer = wxString( data, wxConvISO8859_1, m_BufferLen + 1 );
    }
    success = ( data != 0 );
  } else {
    if( !wxFileExists( m_Filename ) ) {
      return false;
    }
    wxFile file( m_Filename );
    if( !cbRead( file, m_Buffer ) ) {
      return false;
    }
    success  = true;
  }
  // Recompute the length from the actual string and add the sentinel.
  m_BufferLen = m_Buffer.Length();
  m_Buffer += _T( ' ' );
  return success;
}

// Consume a run of whitespace/control characters (anything <= ' ').
// Returns true when at least one character was skipped.
bool Tokenizer::SkipWhiteSpace() {
  // Already at end of input or on a printable character: nothing to do.
  if( IsEOF() || CurrentChar() > _T( ' ' ) ) {
    return false;
  }
  // The current character is blank; keep stepping until a printable
  // character shows up or the buffer ends.
  do {
  } while( MoveToNextChar() && CurrentChar() <= _T( ' ' ) );
  return true;
}

// Return true when the character at m_TokenIndex is escaped, i.e. preceded
// by an odd number of backslashes. An even-length run means the backslashes
// escape each other and the current character stands on its own.
bool Tokenizer::IsEscapedChar() {
  if( PreviousChar() != '\\' ) {
    return false;
  } else {
    // Count the run of consecutive backslashes immediately before the
    // current position; numBackslash ends up as (run length + 1).
    unsigned int numBackslash = 2;
    while( m_TokenIndex >= numBackslash && ( ( m_TokenIndex - numBackslash ) <= m_BufferLen )
           && ( m_Buffer.GetChar( m_TokenIndex - numBackslash ) == '\\' ) ) {
      ++numBackslash;
    }
    // Odd numBackslash -> even run -> not escaped; even -> odd run -> escaped.
    if( ( numBackslash % 2 ) == 1 ) {
      return false;
    } else {
      return true;
    }
  }
}

// Advance until the current character equals 'ch' (or the buffer ends).
// Returns false when the end of input was reached without finding it.
bool Tokenizer::SkipToChar( const wxChar& ch ) {
  do {
    if( CurrentChar() == ch ) {
      break;
    }
  } while( MoveToNextChar() );
  return NotEOF();
}

// Advance to the closing quote of a string/char literal whose delimiter is
// 'ch', skipping escaped quotes. Returns false when the literal is
// unterminated (end of input reached first).
bool Tokenizer::SkipToStringEnd( const wxChar& ch ) {
  for( ;; ) {
    // Find the next candidate delimiter.
    while( CurrentChar() != ch && MoveToNextChar() )
      ;
    if( IsEOF() ) {
      return false;
    }
    // A delimiter preceded by an odd number of backslashes is escaped:
    // step over it and keep searching.
    if( !IsEscapedChar() ) {
      return true;
    }
    MoveToNextChar();
  }
}

// Advance the scan position by one character, maintaining m_LineNumber.
// Returns false (clamping the index to the buffer length) at end of input.
bool Tokenizer::MoveToNextChar() {
  ++m_TokenIndex;
  if( IsEOF() ) {
    m_TokenIndex = m_BufferLen;
    return false;
  }
  // We just stepped over the previous character; if it was a newline the
  // current position belongs to the next line.
  if( PreviousChar() == _T( '\n' ) ) {
    ++m_LineNumber;
  }
  return true;
}

bool Tokenizer::SkipString() {
  if( IsEOF() ) {
    return false;
  }
  const wxChar ch = CurrentChar();
  if( ch == _T( '"' ) || ch == _T( '\'' ) ) {
    MoveToNextChar();
    SkipToStringEnd( ch );
    MoveToNextChar();
    return true;
  }
  return false;
}

// Read from the current position up to the end of the logical line (i.e.
// honouring '\'-continuations) and return the text. With stripUnneeded set,
// comments are dropped and consecutive identical whitespace is collapsed;
// otherwise the raw text is returned. m_ReadingMacroDefinition is set for
// the duration so doc comments found here are queued, not attached
// (see SkipComment).
wxString Tokenizer::ReadToEOL( bool stripUnneeded ) {
  m_ReadingMacroDefinition = true;
  if( stripUnneeded ) {
    // Characters are staged in a fixed buffer and flushed to 'str' when full.
    static const size_t maxBufferLen = 4094;
    wxChar buffer[maxBufferLen + 2];
    wxChar* p = buffer;
    wxString str;
    for( ;; ) {
      while( NotEOF() && CurrentChar() != _T( '\n' ) ) {
        // A '//' comment runs to end of line; stop collecting here.
        if( CurrentChar() == _T( '/' ) && NextChar() == _T( '/' ) ) {
          break;
        }
        // Drop any comments embedded in the line.
        while( SkipComment() )
          ;
        const wxChar ch = CurrentChar();
        if( ch == _T( '\n' ) ) {
          break;
        }
        // Collapse consecutive identical whitespace characters.
        if( ch <= _T( ' ' ) && p > buffer && *( p - 1 ) == ch ) {
          MoveToNextChar();
          continue;
        }
        // Copy string/char literals verbatim (no comment or whitespace
        // processing inside them); flush the staging buffer first.
        if( ch == _T( '"' ) || ch == _T( '\'' ) ) {
          if( p > buffer ) {
            str.Append( buffer, p - buffer );
            p = buffer;
          }
          int literal_start = m_TokenIndex;
          SkipString();
          str.Append( m_Buffer.wx_str() + literal_start, ( m_TokenIndex - literal_start ) );
          continue;
        }
        *p = ch;
        ++p;
        // Flush before the staging buffer overflows.
        if( p >= buffer + maxBufferLen ) {
          str.Append( buffer, p - buffer );
          p = buffer;
        }
        MoveToNextChar();
      }
      // Stop unless the newline is escaped (line continuation).
      if( !IsBackslashBeforeEOL() || IsEOF() ) {
        break;
      } else {
        // Drop trailing blanks (including the '\') before continuing with
        // the next physical line.
        while( p > buffer && *( --p ) <= _T( ' ' ) )
          ;
        MoveToNextChar();
      }
    }
    // Trim trailing whitespace from the staged tail, then flush it.
    while( p > buffer && *( p - 1 ) <= _T( ' ' ) ) {
      --p;
    }
    if( p > buffer ) {
      str.Append( buffer, p - buffer );
    }
    m_ReadingMacroDefinition = false;
    return str;
  } else {
    // Raw mode: remember where we started, let SkipToEOL advance, and
    // return the spanned text untouched.
    const unsigned int idx = m_TokenIndex;
    SkipToEOL();
    m_ReadingMacroDefinition = false;
    return m_Buffer.Mid( idx, m_TokenIndex - idx );
  }
}

// Append the body of a parenthesised expression to 'str', which already
// contains the opening "(" token (GetToken/PeekToken seed it that way).
// Tokens are joined with light spacing rules: spaces around '=', a space
// after ',', and a separating space between a word-like token and a
// preceding word/'*'/'&'/')'. Stops after the matching ')' or at EOF.
void Tokenizer::ReadParentheses( wxString& str ) {
  int level = 1;
  while( NotEOF() ) {
    wxString token = DoGetToken();
    if( token == _T( "(" ) ) {
      ++level;
      str << token;
    } else if( token == _T( ")" ) ) {
      --level;
      str << token;
      if( level == 0 ) {
        break;
      }
    } else if( token == _T( "*" ) || token == _T( "&" ) ) {
      str << token;
    } else if( token == _T( "=" ) ) {
      // Default value / assignment: surround with spaces.
      str << _T( " " ) << token << _T( " " );
    } else if( token == _T( "," ) ) {
      str << token << _T( " " );
    } else {
      // Insert a space only where two word-like tokens would otherwise run
      // together. NOTE(review): str.Last() assumes 'str' is non-empty here.
      wxChar nextChar = token[0];
      wxChar lastChar = str.Last();
      if( ( wxIsalpha( nextChar ) || nextChar == _T( '_' ) )
          && ( wxIsalnum( lastChar ) || lastChar == _T( '_' )
               || lastChar == _T( '*' ) || lastChar == _T( '&' ) || lastChar == _T( ')' ) ) ) {
        str << _T( " " ) << token;
      } else
      { str << token; }
    }
    if( level == 0 ) {
      break;
    }
  }
}

// Advance to the next unescaped end of line (honouring '\'-continuations);
// C-style comments on the way are skipped. Returns false at end of input.
bool Tokenizer::SkipToEOL() {
  for( ;; ) {
    while( NotEOF() && CurrentChar() != '\n' ) {
      if( CurrentChar() == '/' && NextChar() == '*' ) {
        SkipComment();
        if( CurrentChar() == _T( '\n' ) ) {
          break;
        }
        // NOTE(review): falling through also runs the MoveToNextChar()
        // below, skipping the character right after the comment — harmless
        // when discarding the line, but worth confirming.
      }
      MoveToNextChar();
    }
    // Stop unless the newline is escaped by a trailing backslash.
    if( !IsBackslashBeforeEOL() || IsEOF() ) {
      break;
    } else {
      MoveToNextChar();
    }
  }
  return NotEOF();
}

// Advance to the end of a '//' comment: the first newline that is not
// escaped by a trailing backslash (line continuation). Returns false when
// the end of input is reached first.
bool Tokenizer::SkipToInlineCommentEnd() {
  for( ;; ) {
    SkipToChar( _T( '\n' ) );
    if( IsEOF() || !IsBackslashBeforeEOL() ) {
      break;
    }
    // Escaped newline: the comment continues on the next physical line.
    MoveToNextChar();
  }
  return NotEOF();
}

// Skip one comment starting at the current position; returns false when
// the position is not at a comment at all. Documentation comments
// ("/**", "/*!", "///", "//!") are collected rather than discarded when
// m_TokenizerOptions.storeDocumentation is set: "...<"-style text is
// attached to the previous token, everything else is queued in
// m_NextTokenDoc for the next token (see SetLastTokenIdx).
bool Tokenizer::SkipComment() {
  if( IsEOF() ) {
    return false;
  }
  // Determine the comment flavour: C style ("/*") or C++ style ("//").
  bool cstyle;
  if( CurrentChar() == '/' ) {
    if( NextChar() == '*' ) {
      cstyle = true;
    } else if( NextChar() == '/' ) {
      cstyle = false;
    } else {
      return false;
    }
  } else {
    return false;
  }
  // Consume the two-character opener.
  MoveToNextChar();
  MoveToNextChar();
  bool isDoc = false;
  if( m_TokenizerOptions.storeDocumentation ) {
    // "/*!" and "//!" are always documentation.
    isDoc = ( CurrentChar() == '!' );
    // "/**" is documentation, but the empty "/**/" and "/***..." are not.
    if( !isDoc && cstyle ) {
      isDoc = ( CurrentChar() == '*' && NextChar() != '/' && NextChar() != '*' );
    }
    // "///" is documentation, but "////..." separator lines are not.
    if( !isDoc && !cstyle ) {
      isDoc = ( CurrentChar() == '/' && NextChar() != '/' );
    }
  }
  if( isDoc ) {
    // Only collect docs inside active preprocessor branches.
    isDoc = m_ExpressionResult.empty() || m_ExpressionResult.top();
  }
  if( !isDoc ) {
    // Plain comment: find its end and discard the contents.
    while( true ) {
      if( cstyle ) {
        SkipToChar( '*' );
        if( NextChar() == '/' ) {
          MoveToNextChar();
          MoveToNextChar();
          break;
        }
        if( !MoveToNextChar() ) {
          break;
        }
      } else {
        SkipToInlineCommentEnd();
        break;
      }
    }
  } else {
    // Documentation comment: accumulate its text.
    wxString doc;
    MoveToNextChar();
    wxChar c = CurrentChar();
    int lineToAppend = -1;
    // A '<' right after the opener ("/*!<", "///<", ...) documents the
    // PRECEDING declaration.
    if( c == _T( '<' ) ) {
      MoveToNextChar();
      c = CurrentChar();
      lineToAppend = m_LineNumber;
    }
    if( cstyle ) {
      // Collect until the closing "*/".
      while( true ) {
        c = CurrentChar();
        if( c == '*' && NextChar() == '/' ) {
          MoveToNextChar();
          MoveToNextChar();
          break;
        } else {
          doc += c;
          if( !MoveToNextChar() ) {
            break;
          }
        }
      }
    } else {
      // Collect until an unescaped end of line.
      while( true ) {
        c = CurrentChar();
        if( c == '\n' ) {
          if( IsBackslashBeforeEOL() ) {
            MoveToNextChar();
            continue;
          } else {
            break;
          }
        } else {
          doc += c;
          if( !MoveToNextChar() ) {
            break;
          }
        }
      }
    }
    if( doc.size() > 0 ) {
      doc += _T( '\n' );
      if( lineToAppend >= 0 ) {
        // Trailing documentation: attach it to the token just parsed —
        // unless a macro definition is being read, in which case queue it.
        if( m_ReadingMacroDefinition ) {
          m_NextTokenDoc = doc + m_NextTokenDoc;
        } else {
          if( m_LastTokenIdx != -1 ) {
            m_TokenTree->AppendDocumentation( m_LastTokenIdx, m_FileIdx, m_NextTokenDoc + doc );
          }
          m_NextTokenDoc.clear();
        }
      } else {
        // Leading documentation: merge with a directly following comment
        // block and queue the result for the next token.
        while( SkipWhiteSpace() ) ;
        bool skipped = SkipComment();
        if( !cstyle && skipped ) {
          doc = _T( "@brief " ) + doc + _T( '\n' );
        }
        m_NextTokenDoc = doc + m_NextTokenDoc;
      }
    }
  }
  return true;
}

// If the current character starts a preprocessor directive that this
// tokenizer evaluates (#if/#ifdef/.../#endif/#define/#undef), handle it and
// return true; otherwise return false and leave the position untouched.
// The previous version contained a dead store ('c = CurrentChar()' whose
// value was never read) and an always-false 'startIndex != m_TokenIndex'
// branch after the '#' test; both have been removed — behavior is unchanged.
bool Tokenizer::SkipPreprocessorBranch() {
  // Only a '#' can start a directive.
  if( CurrentChar() != _T( '#' ) ) {
    return false;
  }
  const PreprocessorType type = GetPreprocessorType();
  if( type == ptOthers ) {
    // #include, #pragma, ...: leave them for the caller to consume
    // (GetPreprocessorType already rewound the position).
    return false;
  }
  // Conditional compilation and #define/#undef are evaluated in place; the
  // scan position has been advanced past the directive.
  HandleConditionPreprocessor( type );
  return true;
}

// Repeatedly consume whitespace, comments and preprocessor branches until
// none of them makes progress. Returns false at end of input.
bool Tokenizer::SkipUnwanted() {
  for( ;; ) {
    const bool progressed = SkipWhiteSpace() || SkipComment() || SkipPreprocessorBranch();
    if( !progressed ) {
      break;
    }
  }
  return NotEOF();
}

// Return the next parser-level token, consuming it. Whitespace, comments
// and evaluated preprocessor directives are skipped first; a "(" token is
// expanded to the full parenthesised text unless the tokenizer is in
// raw-expression mode. The pre-read position is recorded so UngetToken()
// can rewind, and a pending PeekToken() result is adopted when available.
// (The state test used 'm_State ^ tsRawExpression', which is nonzero
// exactly when the values differ; it is now written as '!=' for clarity —
// identical behavior.)
wxString Tokenizer::GetToken() {
  // Record the undo point for UngetToken().
  m_UndoTokenIndex = m_TokenIndex;
  m_UndoLineNumber = m_LineNumber;
  m_UndoNestLevel  = m_NestLevel;
  if( m_PeekAvailable ) {
    // A previous PeekToken() already did the work: take over its result
    // and the position it stopped at.
    m_TokenIndex = m_PeekTokenIndex;
    m_LineNumber = m_PeekLineNumber;
    m_NestLevel  = m_PeekNestLevel;
    m_Token      = m_PeekToken;
  } else {
    if( SkipUnwanted() ) {
      m_Token = DoGetToken();
      if( m_Token == _T( "(" ) && m_State != tsRawExpression ) {
        ReadParentheses( m_Token );
      }
    } else {
      m_Token.Clear();
    }
  }
  m_PeekAvailable = false;
  return m_Token;
}

// Return the next token WITHOUT consuming it. The token and the position
// it ends at are cached (m_Peek*) so a following GetToken() can adopt them;
// the scan position is restored afterwards. Repeated peeks are free.
// (As in GetToken, the obscure 'm_State ^ tsRawExpression' test is spelled
// as '!=' — it is nonzero exactly when the values differ, so behavior is
// identical.)
wxString Tokenizer::PeekToken() {
  if( !m_PeekAvailable ) {
    // Remember where we are so the position can be restored after reading.
    m_SavedTokenIndex   = m_TokenIndex;
    m_SavedLineNumber   = m_LineNumber;
    m_SavedNestingLevel = m_NestLevel;
    if( SkipUnwanted() ) {
      m_PeekToken = DoGetToken();
      if( m_PeekToken == _T( "(" ) && m_State != tsRawExpression ) {
        ReadParentheses( m_PeekToken );
      }
    } else {
      m_PeekToken.Clear();
    }
    // Cache the post-token position, then rewind.
    m_PeekAvailable     = true;
    m_PeekTokenIndex    = m_TokenIndex;
    m_PeekLineNumber    = m_LineNumber;
    m_PeekNestLevel     = m_NestLevel;
    m_TokenIndex        = m_SavedTokenIndex;
    m_LineNumber        = m_SavedLineNumber;
    m_NestLevel         = m_SavedNestingLevel;
  }
  return m_PeekToken;
}

// Push the last token returned by GetToken() back: it becomes the pending
// peek token and the scan position rewinds to just before it was read.
void Tokenizer::UngetToken() {
  // The current token turns into the peek token...
  m_PeekToken      = m_Token;
  m_PeekTokenIndex = m_TokenIndex;
  m_PeekLineNumber = m_LineNumber;
  m_PeekNestLevel  = m_NestLevel;
  m_PeekAvailable  = true;
  // ...and we rewind to the position recorded before it was read.
  m_TokenIndex = m_UndoTokenIndex;
  m_LineNumber = m_UndoLineNumber;
  m_NestLevel  = m_UndoNestLevel;
}

// Produce the next raw token, transparently expanding macro usages: when an
// identifier names a known #define, the expansion is spliced into the
// buffer and lexing restarts from the replacement text.
wxString Tokenizer::DoGetToken() {
  for( ;; ) {
    SkipUnwanted();
    // Lex() fills m_Lex and reports true only for identifiers — the only
    // token kind eligible for macro replacement.
    if( !Lex() ) {
      return m_Lex;
    }
    if( !CheckMacroUsageAndReplace() ) {
      return m_Lex;
    }
    // A macro was expanded into the buffer; lex again from the new text.
  }
}

// Scan a single raw token at the current position into m_Lex. Returns true
// only when the token is an identifier (the macro-replacement candidate
// kind). Also maintains the brace nesting level and retires finished macro
// expansions from m_ExpandedMacros.
bool Tokenizer::Lex() {
  int start = m_TokenIndex;
  bool identifier = false;
  wxChar c = CurrentChar();
  if( c == '_' || wxIsalpha( c ) ) {
    // Identifier/keyword: [_A-Za-z][_A-Za-z0-9]*
    while( ( ( c == '_' ) || ( wxIsalnum( c ) ) ) &&  MoveToNextChar() ) {
      c = CurrentChar();
    }
    if( IsEOF() ) {
      m_Lex = wxEmptyString;
      return false;
    }
    identifier = true;
    m_Lex = m_Buffer.Mid( start, m_TokenIndex - start );
  } else if( wxIsdigit( c ) ) {
    // Numeric literal: hex digits plus X/L markers are accepted so values
    // like 0xABCl form one token (loose — malformed numbers pass too).
    while( NotEOF() && CharInString( CurrentChar(), _T( "0123456789.abcdefABCDEFXxLl" ) ) ) {
      MoveToNextChar();
    }
    if( IsEOF() ) {
      m_Lex = wxEmptyString;
      return false;
    }
    m_Lex = m_Buffer.Mid( start, m_TokenIndex - start );
  } else if( ( c == '"' ) || ( c == '\'' ) ) {
    // Whole string/char literal, quotes included.
    SkipString();
    m_Lex = m_Buffer.Mid( start, m_TokenIndex - start );
  } else if( c == ':' ) {
    // ':' vs '::'.
    if( NextChar() == ':' ) {
      MoveToNextChar();
      MoveToNextChar();
      m_Lex.assign( TokenizerConsts::colon_colon );
    } else {
      MoveToNextChar();
      m_Lex.assign( TokenizerConsts::colon );
    }
  } else if( c == '=' ) {
    // NOTE(review): this pairs '=' with a FOLLOWING '=', '!', '>' or '<'
    // (producing "==", "=!", "=>", "=<"); "!=", ">=", "<=" instead arrive
    // as two single-character tokens via the branch below. Confirm callers
    // expect exactly this.
    wxChar next = NextChar();
    if( next == _T( '=' ) || next == _T( '!' ) || next == _T( '>' ) || next == _T( '<' ) ) {
      MoveToNextChar();
      MoveToNextChar();
      m_Lex = m_Buffer.Mid( start, m_TokenIndex - start );
    } else {
      MoveToNextChar();
      m_Lex.assign( TokenizerConsts::equal );
    }
  } else {
    // Any other single character; braces adjust the nesting level.
    if( c == '{' ) {
      ++m_NestLevel;
    } else if( c == '}' ) {
      --m_NestLevel;
    }
    m_Lex = c;
    MoveToNextChar();
  }
  // Retire macro expansions whose replacement text has been fully re-lexed.
  while( !m_ExpandedMacros.empty() && m_ExpandedMacros.front().m_End < m_TokenIndex ) {
    m_ExpandedMacros.pop_front();
  }
  return identifier;
}

// If the current lexeme (m_Lex) names a known #define, expand it in place.
// Returns true when a replacement was performed.
bool Tokenizer::CheckMacroUsageAndReplace() {
  const int id = m_TokenTree->TokenExists( m_Lex, -1, tkMacroDef );
  if( id == -1 ) {
    return false;
  }
  const Token* token = m_TokenTree->at( id );
  // Expand only when the definition token is actually retrievable.
  return token ? ReplaceMacroUsage( token ) : false;
}

// Evaluate the #if/#elif expression starting at the current position. The
// tokens of the logical line are fed to an Expression as infix terms, with
// "defined(X)" folded to 1/0 and hex literals converted to decimal.
// Returns the boolean result, or false when evaluation fails.
bool Tokenizer::CalcConditionExpression() {
  // Raw-expression mode keeps "(...)" folding out of DoGetToken/GetToken.
  const TokenizerState oldState = m_State;
  m_State = tsRawExpression;
  const unsigned int undoIndex = m_TokenIndex;
  const unsigned int undoLine = m_LineNumber;
  SkipToEOL();
  // Macro replacement can grow the buffer while the line is re-read (see
  // ReplaceBufferText, which adjusts m_BufferLen/m_TokenIndex), so the end
  // of the line is remembered as a distance from the END of the buffer.
  const unsigned int untouchedBufferLen = m_BufferLen - m_TokenIndex;
  m_TokenIndex = undoIndex;
  m_LineNumber = undoLine;
  Expression exp;
  while( m_TokenIndex < m_BufferLen - untouchedBufferLen ) {
    while( SkipWhiteSpace() || SkipComment() ) ;
    if( m_TokenIndex >= m_BufferLen - untouchedBufferLen ) {
      break;
    }
    wxString token = DoGetToken();
    if( token.Len() > 0 && ( token[0] == _T( '_' ) || wxIsalnum( token[0] ) ) ) {
      if( token == _T( "defined" ) ) {
        // defined(NAME) / defined NAME -> 1 or 0.
        if( IsMacroDefined() ) {
          exp.AddToInfixExpression( _T( "1" ) );
        } else
        { exp.AddToInfixExpression( _T( "0" ) ); }
      } else
      { exp.AddToInfixExpression( token ); }
    } else if( token.StartsWith( _T( "0x" ) ) ) {
      // The expression evaluator only understands decimal numbers.
      long value;
      if( token.ToLong( &value, 16 ) ) {
        exp.AddToInfixExpression( F( _T( "%ld" ), value ) );
      } else
      { exp.AddToInfixExpression( _T( "0" ) ); }
    } else
    { exp.AddToInfixExpression( token ); }
  }
  m_State = oldState;
  exp.ConvertInfixToPostfix();
  if( exp.CalcPostfix() ) {
    // Both a successful evaluation and a true result are required.
    return exp.GetStatus() && exp.GetResult();
  }
  return false;
}

bool Tokenizer::IsMacroDefined() {
  while( SkipWhiteSpace() || SkipComment() ) ;
  bool haveParen = false;
  Lex();
  wxString token = m_Lex;
  if( token == _T( "(" ) ) {
    haveParen = true;
    while( SkipWhiteSpace() || SkipComment() ) ;
    Lex();
    token = m_Lex;
  }
  int id = m_TokenTree->TokenExists( token, -1, tkMacroDef );
  if( haveParen ) {
    while( SkipWhiteSpace() || SkipComment() ) ;
    Lex();
  }
  return ( id != -1 );
}

// From inside a false conditional branch, advance to the next directive at
// the same nesting level that could switch branches (#else/#elif/#endif —
// anything starting "el"/"en"); nested "#if..." blocks are skipped whole.
// The '#' itself is NOT consumed: the position is rewound so the caller's
// main loop re-reads the directive.
void Tokenizer::SkipToNextConditionPreprocessor() {
  do {
    wxChar ch = CurrentChar();
    // Jump over literals, comments and blanks so a '#' inside them cannot
    // be mistaken for a directive.
    if( ch == _T( '\'' ) || ch == _T( '"' ) || ch == _T( '/' ) || ch <= _T( ' ' ) ) {
      while( SkipWhiteSpace() || SkipString() || SkipComment() )
        ;
      ch = CurrentChar();
    }
    if( ch == _T( '#' ) ) {
      const unsigned int undoIndex = m_TokenIndex;
      const unsigned int undoLine = m_LineNumber;
      MoveToNextChar();
      while( SkipWhiteSpace() || SkipComment() ) ;
      const wxChar current = CurrentChar();
      const wxChar next = NextChar();
      // "#if...": a nested conditional; skip to its matching #endif.
      if( current == _T( 'i' ) && next == _T( 'f' ) ) {
        SkipToEndConditionPreprocessor();
      } else if( current == _T( 'e' ) && ( next == _T( 'l' ) || next == _T( 'n' ) ) ) {
        // "#el..."/"#en...": rewind to the '#' and stop here.
        m_TokenIndex = undoIndex;
        m_LineNumber = undoLine;
        break;
      }
    }
  } while( MoveToNextChar() );
}

// Advance past the #endif that closes the current conditional block,
// recursing into nested "#if..." blocks. The #endif line itself is consumed
// (SkipToEOL), unlike SkipToNextConditionPreprocessor which rewinds.
void Tokenizer::SkipToEndConditionPreprocessor() {
  do {
    wxChar ch = CurrentChar();
    // Jump over literals, comments and blanks so a '#' inside them cannot
    // be mistaken for a directive.
    if( ch == _T( '\'' ) || ch == _T( '"' ) || ch == _T( '/' ) || ch <= _T( ' ' ) ) {
      while( SkipWhiteSpace() || SkipString() || SkipComment() )
        ;
      ch = CurrentChar();
    }
    if( ch == _T( '#' ) ) {
      MoveToNextChar();
      while( SkipWhiteSpace() || SkipComment() )
        ;
      const wxChar current = CurrentChar();
      const wxChar next = NextChar();
      // Nested "#if...": recurse to its matching #endif.
      if( current == _T( 'i' ) && next == _T( 'f' ) ) {
        SkipToEndConditionPreprocessor();
      } else if( current == _T( 'e' ) && next == _T( 'n' ) ) {
        // "#en..." (#endif): consume the rest of the line and stop.
        SkipToEOL();
        break;
      }
    }
  } while( MoveToNextChar() );
}

// Classify the preprocessor directive whose '#' is at the current position.
// On a recognised keyword the position is left AFTER the keyword; for
// anything else (#include, #pragma, ...) the position is rewound and
// ptOthers is returned.
PreprocessorType Tokenizer::GetPreprocessorType() {
  // Remember where we started so unknown directives can be rewound.
  const unsigned int undoIndex = m_TokenIndex;
  const unsigned int undoLine = m_LineNumber;
  const unsigned int undoNest = m_NestLevel;

  // Step over the '#', then read the directive keyword.
  MoveToNextChar();
  while( SkipWhiteSpace() || SkipComment() )
    ;
  Lex();
  const wxString token = m_Lex;

  // Keywords of different lengths can never compare equal, so a flat chain
  // of equality tests selects exactly the same directive as before.
  if( token == TokenizerConsts::kw_if ) {
    return ptIf;
  }
  if( token == TokenizerConsts::kw_ifdef ) {
    return ptIfdef;
  }
  if( token == TokenizerConsts::kw_ifndef ) {
    return ptIfndef;
  }
  if( token == TokenizerConsts::kw_elif ) {
    return ptElif;
  }
  if( token == TokenizerConsts::kw_elifdef ) {
    return ptElifdef;
  }
  if( token == TokenizerConsts::kw_elifndef ) {
    return ptElifndef;
  }
  if( token == TokenizerConsts::kw_else ) {
    return ptElse;
  }
  if( token == TokenizerConsts::kw_endif ) {
    return ptEndif;
  }
  if( token == TokenizerConsts::kw_define ) {
    return ptDefine;
  }
  if( token == TokenizerConsts::kw_undef ) {
    return ptUndef;
  }

  // Not a directive we evaluate: restore the position for the caller.
  m_TokenIndex = undoIndex;
  m_LineNumber = undoLine;
  m_NestLevel = undoNest;
  return ptOthers;
}

// Execute one preprocessor directive whose kind was determined by
// GetPreprocessorType(). Conditionals push/update/pop entries on
// m_ExpressionResult (one per open #if...); input belonging to inactive
// branches is skipped. Afterwards all undo/saved positions are re-anchored
// so UngetToken() cannot rewind back into the directive.
void Tokenizer::HandleConditionPreprocessor( const PreprocessorType type ) {
  switch( type ) {
    // #if EXPR: evaluate the expression (assumed true when the
    // preprocessor pass is disabled) and skip ahead when false.
    case ptIf: {
      bool result;
      if( m_TokenizerOptions.wantPreprocessor ) {
        result = CalcConditionExpression();
      } else {
        SkipToEOL();
        result = true;
      }
      m_ExpressionResult.push( result );
      if( !result ) {
        SkipToNextConditionPreprocessor();
      }
    }
    break;
    // #ifdef NAME
    case ptIfdef: {
      bool result;
      if( m_TokenizerOptions.wantPreprocessor ) {
        result = IsMacroDefined();
      } else
      { result = true; }
      SkipToEOL();
      m_ExpressionResult.push( result );
      if( !result ) {
        SkipToNextConditionPreprocessor();
      }
    }
    break;
    // #ifndef NAME
    case ptIfndef: {
      bool result;
      if( m_TokenizerOptions.wantPreprocessor ) {
        result = !IsMacroDefined();
      } else
      { result = true; }
      SkipToEOL();
      m_ExpressionResult.push( result );
      if( !result ) {
        SkipToNextConditionPreprocessor();
      }
    }
    break;
    // #elif EXPR: only evaluated when no earlier branch of this #if ran.
    case ptElif: {
      bool result = false;
      if( !m_ExpressionResult.empty() && !m_ExpressionResult.top() ) {
        result = CalcConditionExpression();
      }
      if( result ) {
        m_ExpressionResult.top() = true;
      } else
      { SkipToNextConditionPreprocessor(); }
    }
    break;
    // #elifdef NAME (same gating as #elif).
    case ptElifdef: {
      bool result = false;
      if( !m_ExpressionResult.empty() && !m_ExpressionResult.top() ) {
        result = IsMacroDefined();
        SkipToEOL();
      }
      if( result ) {
        m_ExpressionResult.top() = true;
      } else
      { SkipToNextConditionPreprocessor(); }
    }
    break;
    // #elifndef NAME (same gating as #elif).
    case ptElifndef: {
      bool result = false;
      if( !m_ExpressionResult.empty() && !m_ExpressionResult.top() ) {
        result = !IsMacroDefined();
        SkipToEOL();
      }
      if( result ) {
        m_ExpressionResult.top() = true;
      } else
      { SkipToNextConditionPreprocessor(); }
    }
    break;
    // #else: taken exactly when no earlier branch was.
    case ptElse: {
      if( !m_ExpressionResult.empty() && !m_ExpressionResult.top() ) {
        SkipToEOL();
      } else
      { SkipToEndConditionPreprocessor(); }
    }
    break;
    // #endif: close the innermost conditional.
    case ptEndif: {
      SkipToEOL();
      if( !m_ExpressionResult.empty() ) {
        m_ExpressionResult.pop();
      }
    }
    break;
    case ptDefine: {
      HandleDefines();
    }
    break;
    case ptUndef: {
      HandleUndefs();
    }
    break;
    case ptOthers:
    default:
      break;
  }
  // Re-anchor the undo and saved positions at the post-directive location.
  m_SavedTokenIndex   = m_UndoTokenIndex = m_TokenIndex;
  m_SavedLineNumber   = m_UndoLineNumber = m_LineNumber;
  m_SavedNestingLevel = m_UndoNestLevel  = m_NestLevel;
}

// Read a parenthesised, comma-separated argument list into 'results'. The
// next non-blank character must be '('; otherwise nothing beyond blanks and
// comments is consumed and false is returned. Nested parentheses remain
// inside a single argument; tokens within an argument are joined with
// single spaces.
bool Tokenizer::SplitArguments( wxArrayString& results ) {
  while( SkipWhiteSpace() || SkipComment() )
    ;
  if( CurrentChar() != _T( '(' ) ) {
    return false;
  }
  MoveToNextChar();
  while( SkipWhiteSpace() || SkipComment() )
    ;
  // Raw-expression mode prevents token post-processing during the scan;
  // the nesting level is preserved across it.
  const TokenizerState oldState = m_State;
  m_State = tsRawExpression;
  const unsigned int oldNestLevel = m_NestLevel;
  int level = 1; // counting the '(' consumed above
  wxString piece;
  while( NotEOF() ) {
    Lex();
    wxString token = m_Lex;
    if( token.IsEmpty() ) {
      break;
    }
    if( token == _T( "(" ) ) {
      ++level;
    } else if( token == _T( ")" ) ) {
      --level;
    }
    // A top-level comma terminates the current argument.
    if( token == _T( "," ) && level == 1 ) {
      results.Add( piece );
      piece.Clear();
    } else if( level != 0 ) {
      // Separate adjacent tokens with a single space.
      if( !piece.IsEmpty() && piece.Last() > _T( ' ' ) ) {
        piece << _T( " " );
      }
      piece << token;
    }
    // The matching ')' was consumed: store the final argument and stop.
    if( level == 0 ) {
      if( !piece.IsEmpty() ) {
        results.Add( piece );
      }
      break;
    }
    while( SkipWhiteSpace() || SkipComment() )
      ;
  }
  m_State = oldState;
  m_NestLevel = oldNestLevel;
  return true;
}

// Splice substitution text into m_Buffer so that it ends exactly at the
// current token index, then rewind the index to the start of the inserted
// text: the very next Lex() reads the replacement. When 'macro' is given,
// the expansion is recorded in m_ExpandedMacros so the same macro is not
// expanded recursively and the nesting depth stays bounded by
// s_MaxMacroReplaceDepth. Returns true (also when the depth limit makes it
// a no-op) so parsing continues.
// BUG FIX: the sanitized copy 'substitute' (backslashes/CR/LF flattened to
// spaces, precisely so the insertion cannot disturb line numbering) was
// built and then ignored — memcpy copied the raw 'target'. It now copies
// 'substitute' as intended.
bool Tokenizer::ReplaceBufferText( const wxString& target, const Token* macro ) {
  if( target.IsEmpty() ) {
    return true;
  }
  // Beyond the maximum macro-replacement depth: give up quietly.
  if( m_ExpandedMacros.size() >= s_MaxMacroReplaceDepth ) {
    m_PeekAvailable = false;
    return true;
  } else if( macro ) {
    ExpandedMacro rep;
    rep.m_End = m_TokenIndex;
    rep.m_Macro = macro;
    m_ExpandedMacros.push_front( rep );
  }
  // Flatten line continuations and newlines to spaces so the splice cannot
  // change line numbering.
  wxString substitute( target );
  for( size_t i = 0; i < substitute.Len(); ++i ) {
    switch( ( wxChar )substitute.GetChar( i ) ) {
      case _T( '\\' ):
      case _T( '\r' ):
      case _T( '\n' ):
        substitute.SetChar( i, _T( ' ' ) );
        break;
      default:
        break;
    }
  }
  const size_t len = substitute.Len();
  // If the text is longer than what precedes the token index, grow the
  // buffer at the front with padding spaces and shift all bookkeeping.
  if( m_TokenIndex < len ) {
    const size_t diffLen = len - m_TokenIndex;
    m_Buffer.insert( 0, wxString( _T( ' ' ), diffLen ) );
    m_BufferLen += diffLen;
    m_TokenIndex += diffLen;
    for( std::list<ExpandedMacro>::iterator i = m_ExpandedMacros.begin();
         i != m_ExpandedMacros.end();
         ++i ) {
      ( *i ).m_Begin += diffLen;
      ( *i ).m_End += diffLen;
    }
  }
  // Overwrite buffer region [m_TokenIndex - len, m_TokenIndex) with the
  // sanitized text, then rewind so it is lexed next.
  wxChar* p = const_cast<wxChar*>( ( const wxChar* )m_Buffer ) + m_TokenIndex - len;
  memcpy( p, ( const wxChar* )substitute, len * sizeof( wxChar ) );
  m_TokenIndex -= len;
  if( macro ) {
    m_ExpandedMacros.front().m_Begin = m_TokenIndex;
  }
  // Re-anchor undo/saved positions; any peeked token is now stale.
  m_SavedTokenIndex   = m_UndoTokenIndex = m_TokenIndex;
  m_SavedLineNumber   = m_UndoLineNumber = m_LineNumber;
  m_SavedNestingLevel = m_UndoNestLevel  = m_NestLevel;
  m_PeekAvailable = false;
  return true;
}

bool Tokenizer::ReplaceMacroUsage( const Token* tk ) {
  for( std::list<ExpandedMacro>::iterator i = m_ExpandedMacros.begin(); i != m_ExpandedMacros.end(); ++i ) {
    if( tk == ( *i ).m_Macro ) {
      return false;
    }
  }
  wxString macroExpandedText;
  if( GetMacroExpandedText( tk, macroExpandedText ) ) {
    return ReplaceBufferText( macroExpandedText, tk );
  }
  return false;
}

// Compute the Knuth-Morris-Pratt failure table for 'pattern' into 'next'.
// NOTE(review): for a pattern of length L the loop increments j and then
// assigns next[j], so next[L] is written too — callers must supply at
// least L + 1 ints.
void Tokenizer::KMP_GetNextVal( const wxChar* pattern, int next[] ) {
  int j = 0, k = -1;
  next[0] = -1;
  while( pattern[j] != _T( '\0' ) ) {
    if( k == -1 || pattern[j] == pattern[k] ) {
      ++j;
      ++k;
      // Standard KMP optimisation: collapse states that would compare the
      // same character again.
      if( pattern[j] != pattern[k] ) {
        next[j] = k;
      } else {
        next[j] = next[k];
      }
    } else {
      k = next[k];
    }
  }
}

// KMP substring search: return the offset of 'pattern' within 'text', -1
// when absent, or -2 for an unreasonably long pattern. patternLen must be
// the length of 'pattern'.
// BUG FIX: the failure table was a variable-length array of patternLen
// ints — a non-standard C++ construct that KMP_GetNextVal additionally
// writes one slot past (it fills next[0..patternLen]). It is now a
// std::vector sized patternLen + 1. The historical >=5012 size guard is
// kept; the previous nested "if (patternLen > 1024) if (< 5012);" chain
// reduced to exactly that test.
int Tokenizer::KMP_Find( const wxChar* text, const wxChar* pattern, const int patternLen ) {
  if( !text || !pattern || pattern[0] == _T( '\0' ) || text[0] == _T( '\0' ) ) {
    return -1;
  }
  if( patternLen >= 5012 ) {
    return -2;
  }
  // KMP failure table; one extra slot because KMP_GetNextVal also writes
  // next[patternLen].
  std::vector<int> next( patternLen + 1 );
  KMP_GetNextVal( pattern, &next[0] );
  int index = 0, i = 0, j = 0;
  while( text[i] != _T( '\0' ) && pattern[j] != _T( '\0' ) ) {
    if( text[i] == pattern[j] ) {
      ++i;
      ++j;
    } else {
      // Mismatch: advance the window start and fall back in the pattern.
      index += j - next[j];
      if( next[j] != -1 ) {
        j = next[j];
      } else {
        j = 0;
        ++i;
      }
    }
  }
  if( pattern[j] == _T( '\0' ) ) {
    return index;
  } else {
    return -1;
  }
}

// Remember the index of the token just created and flush any queued
// documentation (m_NextTokenDoc) onto it — but only when the current
// preprocessor branch is active. The queue is cleared in every case.
void Tokenizer::SetLastTokenIdx( int tokenIdx ) {
  m_LastTokenIdx = tokenIdx;
  const bool haveDoc = ( tokenIdx != -1 ) && !m_NextTokenDoc.IsEmpty();
  if( haveDoc && ( m_ExpressionResult.empty() || m_ExpressionResult.top() ) ) {
    m_TokenTree->AppendDocumentation( tokenIdx, m_FileIdx, m_NextTokenDoc );
  }
  m_NextTokenDoc.clear();
}

// Compute the expansion text of macro 'tk' into expandedText. Returns false
// for non-expandable cases: null token, name identical to the replacement,
// unreadable actual arguments, or arity mismatch. For function-like macros,
// the formal parameters are replaced by the actual arguments as whole
// words, then the "##" (paste) and "#" (stringize) operators are
// approximated textually.
bool Tokenizer::GetMacroExpandedText( const Token* tk, wxString& expandedText ) {
  // A macro expanding to its own name would never terminate.
  if( !tk || tk->m_Name == tk->m_FullType ) {
    return false;
  }
  // Object-like macro: the replacement text is used verbatim.
  if( tk->m_Args.IsEmpty() ) {
    expandedText = tk->m_FullType;
    return true;
  }
  // Recover the formal parameter names by splicing the stored "(...)" text
  // into the buffer and re-parsing it.
  wxArrayString formalArgs;
  if( ReplaceBufferText( tk->m_Args ) ) {
    SplitArguments( formalArgs );
  }
  // Read the actual arguments of this particular usage from the input.
  wxArrayString actualArgs;
  if( !SplitArguments( actualArgs ) ) {
    m_Lex = tk->m_Name;
    return false;
  }
  if( formalArgs.GetCount() != actualArgs.GetCount() ) {
    return false;
  }
  // Substitute each formal parameter (whole-word occurrences only) with
  // the corresponding actual argument.
  expandedText = tk->m_FullType;
  const size_t totalCount = formalArgs.GetCount();
  for( size_t i = 0; i < totalCount; ++i ) {
    wxChar* data = const_cast<wxChar*>( ( const wxChar* )expandedText.GetData() );
    const wxChar* dataEnd = data + expandedText.Len();
    const wxChar* key = formalArgs[i].GetData();
    const int keyLen = formalArgs[i].Len();
    wxString alreadyReplaced;
    alreadyReplaced.Alloc( expandedText.Len() * 2 );
    while( true ) {
      const int pos = GetFirstTokenPosition( data, dataEnd - data, key, keyLen );
      if( pos != -1 ) {
        alreadyReplaced << wxString( data, pos ) << actualArgs[i];
        data += pos + keyLen;
        if( data == dataEnd ) {
          break;
        }
      } else {
        alreadyReplaced << data;
        break;
      }
    }
    expandedText = alreadyReplaced;
  }
  // Token pasting: delete every "##" together with the surrounding spaces
  // so the neighbouring tokens fuse.
  for( int pos = expandedText.Find( _T( "##" ) );
       pos != wxNOT_FOUND;
       pos = expandedText.Find( _T( "##" ) ) ) {
    int beginPos = pos;
    int length = expandedText.size();
    while( beginPos > 0 && expandedText[beginPos - 1] == _T( ' ' ) ) {
      beginPos--;
    }
    int endPos = pos + 1;
    while( endPos < length - 1 && expandedText[endPos + 1] == _T( ' ' ) ) {
      endPos++;
    }
    expandedText.Remove( beginPos, endPos - beginPos + 1 );
  }
  // Stringize: intended to rewrite "# arg" as "\"arg\"".
  // NOTE(review): after the space-skipping loop, expandedText[beginPos] is
  // still the '#' or a space — never alphabetic — so this wxIsalpha() test
  // appears to always break out on the first '#'; verify whether stringize
  // support is effectively disabled.
  for( int pos = expandedText.Find( _T( "#" ) );
       pos != wxNOT_FOUND;
       pos = expandedText.Find( _T( "#" ) ) ) {
    int beginPos = pos;
    int length = expandedText.size();
    while( beginPos < length - 1 && expandedText[beginPos + 1] == _T( ' ' ) ) {
      beginPos++;
    }
    if( !wxIsalpha( expandedText[beginPos] ) ) {
      break;
    }
    int endPos = beginPos + 1;
    while( endPos < length - 1 && expandedText[endPos + 1] != _T( ' ' ) ) {
      endPos++;
    }
    endPos++;
    if( endPos == length ) {
      expandedText << _T( " " );
    }
    expandedText.SetChar( pos, _T( ' ' ) );
    expandedText.SetChar( beginPos, _T( '"' ) );
    expandedText.SetChar( endPos, _T( '"' ) );
  }
  return true;
}

// Return the offset of the first whole-word occurrence of 'key' (keyLen
// characters) within buffer[0..bufferLen), or -1 when there is none.
// "Whole word" means the match is not adjacent to an identifier character
// ([A-Za-z0-9_]) on either side.
int Tokenizer::GetFirstTokenPosition( const wxChar* buffer, const size_t bufferLen,
                                      const wxChar* key, const size_t keyLen ) {
  int pos = -1;
  wxChar* p = const_cast<wxChar*>( buffer );
  const wxChar* endBuffer = buffer + bufferLen;
  for( ;; ) {
    // Next raw occurrence from the current scan position.
    const int ret = KMP_Find( p, key, keyLen );
    if( ret == -1 ) {
      break;
    }
    p += ret;
    // Reject a match glued to an identifier character on the left...
    if( p > buffer ) {
      const wxChar ch = *( p - 1 );
      if( ch == _T( '_' ) || wxIsalnum( ch ) ) {
        p += keyLen;
        continue;
      }
    }
    p += keyLen;
    // ...or on the right.
    if( p < endBuffer ) {
      const wxChar ch = *p;
      if( ch == _T( '_' ) || wxIsalnum( ch ) ) {
        continue;
      }
    }
    pos = p - buffer - keyLen;
    break;
  }
  return pos;
}

void Tokenizer::HandleDefines() {
  size_t lineNr = GetLineNumber();
  while( SkipWhiteSpace() || SkipComment() )
    ;
  Lex();
  wxString token = m_Lex;
  if( token.IsEmpty() ) {
    return;
  }
  wxString readToEOL = ReadToEOL( true );
  wxString para;
  wxString replaceList;
  if( !readToEOL.IsEmpty() ) {
    if( readToEOL[0] == _T( '(' ) ) {
      int level = 1;
      size_t pos = 0;
      while( level && pos < readToEOL.Len() ) {
        wxChar ch = readToEOL.GetChar( ++pos );
        if( ch == _T( ')' ) ) {
          --level;
        } else if( ch == _T( '(' ) ) {
          ++level;
        }
      }
      para = readToEOL.Left( ++pos );
      replaceList << readToEOL.Right( readToEOL.Len() - ( ++pos ) );
    } else
    { replaceList << readToEOL; }
  }
  AddMacroDefinition( token, lineNr, para, replaceList );
}

void Tokenizer::HandleUndefs() {
  while( SkipWhiteSpace() || SkipComment() )
    ;
  Lex();
  wxString token = m_Lex;
  if( !token.IsEmpty() ) {
    int index = m_TokenTree->TokenExists( token, -1, tkMacroDef );
    if( index != wxNOT_FOUND ) {
      m_TokenTree->erase( index );
    }
  }
  SkipToEOL();
}

// Store (or refresh) a macro definition in the token tree: 'para' is the
// formal parameter list (with parentheses) and 'substitues' the replacement
// text. Redefinitions reuse the existing token.
void Tokenizer::AddMacroDefinition( wxString name, int line, wxString para, wxString substitues ) {
  Token* token;
  const int index = m_TokenTree->TokenExists( name, -1, tkMacroDef );
  if( index == wxNOT_FOUND ) {
    // First definition of this name: create a fresh macro-definition token.
    token = new Token( name, m_FileIdx, line, ++m_TokenTree->m_TokenTicketCount );
    token->m_TokenKind = tkMacroDef;
    token->m_ParentIndex = -1;
    m_TokenTree->insert( token );
  } else {
    token = m_TokenTree->at( index );
  }
  // (Re)record arguments and replacement; queued docs attach to this token.
  token->m_Args = para;
  token->m_FullType = substitues;
  SetLastTokenIdx( token->m_Index );
}
