#include <sdk.h>
#include "nativeparser_base.h"
#include "parser/tokenizer.h"
#include "parser/cclogger.h"

#define CC_NATIVEPARSERBASE_DEBUG_OUTPUT 0

// Nothing to initialise beyond the members' own default constructors.
NativeParserBase::NativeParserBase()
{
}

// Nothing to release explicitly; members clean up after themselves.
NativeParserBase::~NativeParserBase()
{
}

// Forget the last component remembered by ResolveExpression() so the next
// expression resolution starts with a clean state.
void NativeParserBase::Reset() {
  m_LastComponent.Clear();
}

// Resolve a queued chain of expression components (e.g. "a" -> "b" -> "c"
// for "a.b->c") against the token tree, looking the first component up inside
// 'parentTokenIdx' (-1 = global scope) and recursing for the remainder.
// Matching token indices are accumulated into 'result'.
// 'isPrefix'/'caseSensitive' effectively apply only to the last component
// (the text still being typed); intermediate components are matched exactly.
// @return result.size() after this call.
size_t NativeParserBase::FindAIMatches( TokenTree* tree,
                                        std::queue<ParserComponent> components,
                                        TokenIdxSet& result,
                                        int parentTokenIdx,
                                        bool isPrefix,
                                        bool caseSensitive,
                                        bool use_inheritance,
                                        short int kindMask,
                                        TokenIdxSet* search_scope ) {
  if( components.empty() ) {
    return 0;
  }
  // Consume the front component of the chain.
  ParserComponent parser_component = components.front();
  components.pop();
  // "this" inside a known parent scope resolves to that scope itself:
  // just continue with the rest of the chain under the same parent.
  if( ( parentTokenIdx != -1 ) && ( parser_component.component == _T( "this" ) ) ) {
    return FindAIMatches( tree, components, result, parentTokenIdx,
                          isPrefix, caseSensitive, use_inheritance,
                          kindMask, search_scope );
  }
  bool isLastComponent = components.empty();
  wxString searchtext = parser_component.component;
  TokenIdxSet local_result;
  // Intermediate components must match exactly (case-sensitive, whole word);
  // only the final component honours the caller's case/prefix options.
  GenerateResultSet( tree, searchtext, parentTokenIdx, local_result, ( caseSensitive || !isLastComponent ), ( isLastComponent && !isPrefix ), kindMask );
  for( TokenIdxSet::const_iterator it = local_result.begin(); it != local_result.end(); it++ ) {
    int id = *it;
    const Token* token = tree->at( id );
    if( !token ) {
      continue;
    }
    // Operator overloads cannot appear as a plain component in the chain.
    if( token->m_IsOperator ) {
      continue;
    }
    // An enum match exposes the enum itself plus all of its enumerators.
    if( token->m_TokenKind == tkEnum ) {
      result.insert( id );
      for( TokenIdxSet::const_iterator tis_it = token->m_Children.begin();
           tis_it != token->m_Children.end();
           tis_it++ ) {
        result.insert( *tis_it );
      }
      continue;
    }
    // For a variable/function component, follow its base type so the next
    // component is looked up inside that type rather than the token itself.
    if( !searchtext.IsEmpty() && ( parser_component.tokenType != pttSearchText ) && !token->m_BaseType.IsEmpty() ) {
      TokenIdxSet type_result;
      std::queue<ParserComponent> type_components;
      wxString actual = token->m_BaseType;
      BreakUpComponents( actual, type_components );
      // Build the scope set used to resolve the base type: the caller's
      // search scope plus the grand-parent of the current parent (if any).
      TokenIdxSet temp_search_scope;
      if( search_scope ) {
        temp_search_scope = *search_scope;
      }
      if( parentTokenIdx != -1 ) {
        const Token* parentToken = tree->at( parentTokenIdx );
        if( parentToken ) {
          const Token* parent = tree->at( parentToken->m_ParentIndex );
          if( parent ) {
            temp_search_scope.insert( parent->m_Index );
          }
        }
      }
      TokenIdxSet::const_iterator itsearch;
      itsearch = temp_search_scope.begin();
      // NOTE(review): when 'search_scope' is NULL this condition never
      // becomes false through 'itsearch', yet the loop still dereferences
      // and advances it — looks unsafe if temp_search_scope is empty or the
      // iterator runs past end(); confirm the intended semantics.
      while( !search_scope || itsearch != temp_search_scope.end() ) {
        const Token* parent = tree->at( *itsearch );
        // Try resolving the base type in this scope, then in each of its
        // enclosing parent scopes, until something matches.
        do {
          if( FindAIMatches( tree,
                             type_components,
                             type_result,
                             parent ? parent->m_Index : -1,
                             true,
                             false,
                             false,
                             tkClass | tkNamespace | tkTypedef | tkEnum,
                             &temp_search_scope ) != 0 ) {
            break;
          }
          if( !parent ) {
            break;
          }
          parent = tree->at( parent->m_ParentIndex );
        } while( true );
        ++itsearch;
      }
      if( !type_result.empty() ) {
        // Use the first resolved type as the parent for the remaining chain;
        // additional candidates are each explored with a copy of the chain.
        id = *( type_result.begin() );
        if( type_result.size() > 1 ) {
          TokenIdxSet::const_iterator tis_it = type_result.begin();
          ++tis_it;
          while( tis_it != type_result.end() ) {
            std::queue<ParserComponent> lcomp = components;
            FindAIMatches( tree, lcomp, result, *tis_it, isPrefix, caseSensitive, use_inheritance,  kindMask, search_scope );
            ++tis_it;
          }
        }
      }
    }
    if( isLastComponent ) {
      result.insert( id );
    } else
    { FindAIMatches( tree, components, result, id, isPrefix, caseSensitive, use_inheritance, kindMask, search_scope ); }
  }
  return result.size();
}

// Map each token in 'procResult' to the scope(s) that should be searched for
// identifiers at the current editing position: a class token is a scope by
// itself; for anything else the enclosing parent is used, and a function
// with children (local declarations) is additionally searchable itself.
void NativeParserBase::FindCurrentFunctionScope( TokenTree*        tree,
    const TokenIdxSet& procResult,
    TokenIdxSet&       scopeResult ) {
  CC_LOCKER_TRACK_TT_MTX_LOCK( s_TokenTreeMutex )
  TokenIdxSet::const_iterator idx = procResult.begin();
  for( ; idx != procResult.end(); ++idx ) {
    const Token* tk = tree->at( *idx );
    if( !tk )
      continue;
    if( tk->m_TokenKind == tkClass ) {
      // A class scope is usable directly.
      scopeResult.insert( *idx );
      continue;
    }
    // A function that contains declarations is itself a searchable scope.
    if( ( tk->m_TokenKind & tkAnyFunction ) && tk->HasChildren() )
      scopeResult.insert( *idx );
    // In any case the enclosing parent scope is searchable.
    scopeResult.insert( tk->m_ParentIndex );
  }
  CC_LOCKER_TRACK_TT_MTX_UNLOCK( s_TokenTreeMutex )
}

// Remove entries from the search scope that no longer refer to a namespace,
// class, typedef or function token, then ensure the global scope (-1) is
// always present.
void NativeParserBase::CleanupSearchScope( TokenTree*   tree,
    TokenIdxSet* searchScope ) {
  CC_LOCKER_TRACK_TT_MTX_LOCK( s_TokenTreeMutex )
  TokenIdxSet::const_iterator it = searchScope->begin();
  while( it != searchScope->end() ) {
    const Token* tk = tree->at( *it );
    const bool keep = tk && ( tk->m_TokenKind & ( tkNamespace | tkClass | tkTypedef | tkAnyFunction ) );
    if( keep )
      ++it;
    else
      searchScope->erase( it++ );   // post-increment keeps the iterator valid
  }
  CC_LOCKER_TRACK_TT_MTX_UNLOCK( s_TokenTreeMutex )
  // The global namespace is always a valid scope.
  searchScope->insert( -1 );
}

// Compute the [start, end] character range inside 'calltip' that should be
// highlighted for the parameter currently being typed (the one following
// 'typedCommas' top-level commas).
// @param calltip      full calltip text, e.g. "int foo(int a, int b)"
// @param start [out]  index just past the function's opening parenthesis,
//                     advanced past each counted top-level comma
// @param end   [out]  index of the comma ending the current parameter, or of
//                     the closing parenthesis for the last parameter
// @param typedCommas  number of commas the user has already typed
void NativeParserBase::GetCallTipHighlight( const wxString& calltip, int* start, int* end, int typedCommas ) {
  int pos = 0;
  int paramsCloseBracket = calltip.length() - 1;
  int nest = 0;
  int commas = 0;
  *start = FindFunctionOpenParenthesis( calltip ) + 1;
  *end = 0;
  while( true ) {
    // NOTE(review): relies on GetChar() yielding '\0' just past the last
    // character to terminate the scan — confirm for the wxString build used.
    wxChar c = calltip.GetChar( pos++ );
    if( c == '\0' ) {
      break;
    } else if( c == '(' ) {
      ++nest;
    } else if( c == ')' ) {
      --nest;
      if( nest == 0 ) {
        // Remember where the outermost parameter list closes.
        paramsCloseBracket = pos - 1;
      }
    } else if( c == ',' && nest == 1 ) {
      // A comma at nesting depth 1 separates top-level parameters.
      ++commas;
      if( commas == typedCommas + 1 ) {
        *end = pos - 1;
        return;
      }
      *start = pos;
    }
  }
  // Reached the end of the tip: highlight up to the closing parenthesis.
  if( *end == 0 ) {
    *end = paramsCloseBracket;
  }
}

int NativeParserBase::FindFunctionOpenParenthesis( const wxString& calltip ) {
  int nest = 0;
  for( size_t i = calltip.length(); i > 0; --i ) {
    wxChar c = calltip[i - 1];
    if( c == _T( '(' ) ) {
      --nest;
      if( nest == 0 ) {
        return i - 1;
      }
    } else if( c == _T( ')' ) ) {
      ++nest;
    }
  }
  return -1;
}

// Extract the left-most complete token from 'line', consuming it (and the
// accessor that follows it) from the line.
//
// @param line               [in/out] expression text; the returned token and
//                           the operator following it are removed from it
// @param tokenType          [out] how the remaining text relates to the
//                           returned token: pttClass after '.'/"->",
//                           pttNamespace after "::", pttFunction when the
//                           token is called, pttSearchText otherwise
// @param tokenOperatorType  [out] operator attached to the token ("()",
//                           "[]", "->", "*"); otOperatorUndefined if none
// @return the extracted token text (empty when 'line' was empty).
//
// Fix: removed a second, redundant assignment of otOperatorUndefined that
// immediately repeated the initialisation a few lines above.
wxString NativeParserBase::GetCCToken( wxString& line, ParserTokenType& tokenType, OperatorType& tokenOperatorType ) {
  tokenType = pttSearchText;
  tokenOperatorType = otOperatorUndefined;
  if( line.IsEmpty() ) {
    return wxEmptyString;
  }
  unsigned int startAt = FindCCTokenStart( line );
  wxString res = GetNextCCToken( line, startAt, tokenOperatorType );
  if( startAt == line.Len() ) {
    // The token consumed the whole line.
    line.Clear();
  } else {
    startAt = AfterWhitespace( startAt, line );
    if( IsOperatorDot( startAt, line ) ) {
      // '.' member access follows the token.
      tokenType = pttClass;
      line.Remove( 0, startAt + 1 );
    } else if( IsOperatorEnd( startAt, line ) ) {
      // "->" or "::" follows the token.
      if( IsOperatorPointer( startAt, line ) && !res.IsEmpty() && tokenOperatorType != otOperatorSquare ) {
        tokenOperatorType = otOperatorPointer;
      }
      if( line.GetChar( startAt ) == ':' ) {
        tokenType = pttNamespace;
      } else
      { tokenType = pttClass; }
      line.Remove( 0, startAt + 1 );
    } else
    { line.Clear(); }
  }
  // A token followed by "(...)" is a function call regardless of accessors.
  if( tokenOperatorType == otOperatorParentheses ) {
    tokenType = pttFunction;
  }
  return res;
}

// Scan backwards from the end of 'line' to find the index at which the
// right-most complete code-completion token starts, skipping over balanced
// "(...)" / "[...]" groups and chained "." / "->" / "::" accessors.
// @return index of the token's first character (may equal line.Len() when
//         the line ends in an accessor with nothing after it).
unsigned int NativeParserBase::FindCCTokenStart( const wxString& line ) {
  // Start at the last character and walk left.
  int startAt = line.Len() - 1;
  int nest = 0;
  bool repeat = true;
  while( repeat ) {
    repeat = false;
    startAt = BeginOfToken( startAt, line );
    if( IsOperatorDot( startAt, line ) ) {
      // Walked into a '.' accessor: the expression continues to its left.
      --startAt;
      repeat = true;
    } else if( IsOperatorEnd( startAt, line ) ) {
      // Two-character accessor ("->" or "::"): skip both characters.
      startAt -= 2;
      repeat = true;
    }
    if( repeat ) {
      // Skip a balanced bracket group preceding the accessor, e.g. the
      // "(...)" of a call or a "[...]" index.
      startAt = BeforeWhitespace( startAt, line );
      if( IsClosingBracket( startAt, line ) ) {
        ++nest;
        while( ( --startAt >= 0 ) && ( nest != 0 ) ) {
          switch( line.GetChar( startAt ) ) {
            case ']':
            case ')':
              ++nest;
              --startAt;
              break;
            case '[':
            case '(':
              --nest;
              --startAt;
              break;
            default:
              break;
          }
          // Skip whitespace and account for any bracket we landed on.
          startAt = BeforeWhitespace( startAt, line );
          if( IsClosingBracket( startAt, line ) ) {
            ++nest;
          }
          if( IsOpeningBracket( startAt, line ) ) {
            --nest;
          }
        }
        startAt = BeforeToken( startAt, line );
      }
    }
  }
  // Step forward onto the token's first character and skip leading blanks.
  ++startAt;
  startAt = AfterWhitespace( startAt, line );
  return startAt;
}

// Read the token that starts at 'startAt' in 'line', advancing 'startAt'
// past the token and any balanced bracket group that follows it.
// Classifies a leading '*' (dereference) and a trailing "(...)"/"[...]"
// into 'tokenOperatorType'.
// @return the token's text (identifier characters only).
wxString NativeParserBase::GetNextCCToken( const wxString& line, unsigned int& startAt, OperatorType& tokenOperatorType ) {
  wxString res;
  int nest = 0;
  // Skip a leading "(*" / "(&" prefix (e.g. "(*(pt)).x"), counting opened
  // parentheses so they can be balanced after the token is read.
  if( ( startAt < line.Len() ) && ( line.GetChar( startAt ) == '(' ) ) {
    while( ( startAt < line.Len() ) && ( ( line.GetChar( startAt ) == '*' ) || ( line.GetChar( startAt ) == '&' ) || ( line.GetChar( startAt ) == '(' ) ) ) {
      if( line.GetChar( startAt ) == '(' ) {
        ++nest;
      }
      if( line.GetChar( startAt ) == _T( '*' ) ) {
        tokenOperatorType = otOperatorStar;
      }
      ++startAt;
    }
  }
  // Collect the identifier characters of the token itself.
  while( InsideToken( startAt, line ) ) {
    res << line.GetChar( startAt );
    ++startAt;
  }
  // Balance the parentheses opened by the "(*"-style prefix above.
  while( ( nest > 0 ) && ( startAt < line.Len() ) ) {
    if( line.GetChar( startAt ) == '(' ) {
      ++nest;
    } else if( line.GetChar( startAt ) == ')' ) {
      --nest;
    }
    ++startAt;
  }
  startAt = AfterWhitespace( startAt, line );
  // Consume a trailing "(...)" or "[...]" group and record which it was.
  if( IsOpeningBracket( startAt, line ) ) {
    if( line.GetChar( startAt ) == _T( '(' ) ) {
      tokenOperatorType = otOperatorParentheses;
    } else if( line.GetChar( startAt ) == _T( '[' ) ) {
      tokenOperatorType = otOperatorSquare;
    }
    ++nest;
    while( ( startAt < line.Len() - 1 ) && ( nest != 0 ) ) {
      ++startAt;
      switch( line.GetChar( startAt ) ) {
        case ']':
        case ')':
          --nest;
          ++startAt;
          break;
        case '[':
          tokenOperatorType = otOperatorSquare;
          // fall through: '[' opens a nesting level just like '('
        case '(':
          ++nest;
          ++startAt;
          break;
        default:
          break;
      }
      startAt = AfterWhitespace( startAt, line );
      if( IsOpeningBracket( startAt, line ) ) {
        ++nest;
      }
      if( IsClosingBracket( startAt, line ) ) {
        --nest;
        if( nest == 0 ) {
          ++startAt;
        }
      }
    }
  }
  // Step over the first character of a following accessor ("->", "::", ...).
  if( IsOperatorBegin( startAt, line ) ) {
    ++startAt;
  }
  return res;
}

// Delete all children (local declarations) of the last parsed function token
// and invalidate the cached index. No-op when the index no longer maps to a
// live token.
void NativeParserBase::RemoveLastFunctionChildren( TokenTree* tree,
    int&       lastFuncTokenIdx ) {
  CC_LOCKER_TRACK_TT_MTX_LOCK( s_TokenTreeMutex )
  Token* func = tree->at( lastFuncTokenIdx );
  if( func ) {
    lastFuncTokenIdx = -1;   // consume the cached index either way
    if( func->m_TokenKind & tkAnyFunction )
      func->DeleteAllChildren();
  }
  CC_LOCKER_TRACK_TT_MTX_UNLOCK( s_TokenTreeMutex )
}

// Split a compound expression such as "A::B.C" into its ParserComponents,
// pushing them onto 'components' in left-to-right order. GetCCToken() peels
// one token per iteration off the working copy until it reports
// pttSearchText, which marks the final (possibly empty) component.
// @return always 0 (the return value carries no information).
//
// Fix: removed the dead s_DebugSmartSense block that built a local
// 'tokenTypeString' which was never used afterwards (leftover from stripped
// trace logging) — no observable behavior change.
size_t NativeParserBase::BreakUpComponents( const wxString& actual, std::queue<ParserComponent>& components ) {
  ParserTokenType tokenType;
  wxString tmp = actual;
  OperatorType tokenOperatorType;
  while( true ) {
    wxString tok = GetCCToken( tmp, tokenType, tokenOperatorType );
    ParserComponent pc;
    pc.component = tok;
    pc.tokenType = tokenType;
    pc.tokenOperatorType = tokenOperatorType;
    // An empty token is kept only when it is the trailing search text of a
    // non-empty chain (e.g. "wxString::" -> "wxString" + empty search text).
    if( !tok.IsEmpty() || ( tokenType == pttSearchText && !components.empty() ) ) {
      components.push( pc );
    }
    // pttSearchText marks the last component of the expression.
    if( tokenType == pttSearchText ) {
      break;
    }
  }
  return 0;
}

// Resolve a full expression (already broken into components) to the set of
// tokens it can denote. Each component is looked up in the scope produced by
// the previous one; for variables/functions the base type is followed (with
// template-map substitution) to obtain the next scope. The final scope set is
// expanded with accessible constructors into 'result'.
// @return result.size() after resolution.
size_t NativeParserBase::ResolveExpression( TokenTree* tree, std::queue<ParserComponent> components,
    const TokenIdxSet& searchScope, TokenIdxSet& result, bool caseSense, bool isPrefix ) {
  m_TemplateMap.clear();
  if( components.empty() ) {
    return 0;
  }
  // Start from the caller's scope, or the global scope when none was given.
  TokenIdxSet initialScope;
  if( !searchScope.empty() ) {
    initialScope = searchScope;
  } else {
    initialScope.insert( -1 );
  }
  while( !components.empty() ) {
    TokenIdxSet initialResult;
    ParserComponent subComponent = components.front();
    components.pop();
    wxString searchText = subComponent.component;
    // "this": narrow the current scope down to class tokens only.
    if( searchText == _T( "this" ) ) {
      initialScope.erase( -1 );
      TokenIdxSet tempInitialScope = initialScope;
      CC_LOCKER_TRACK_TT_MTX_LOCK( s_TokenTreeMutex )
      for( TokenIdxSet::const_iterator it = tempInitialScope.begin();
           it != tempInitialScope.end(); ++it ) {
        const Token* token = tree->at( *it );
        if( token && ( token->m_TokenKind != tkClass ) ) {
          initialScope.erase( *it );
        }
      }
      CC_LOCKER_TRACK_TT_MTX_UNLOCK( s_TokenTreeMutex )
      if( !initialScope.empty() ) {
        continue;
      } else
      { break; }
    }
    // Match this component: the last component honours the caller's
    // case/prefix options, intermediate ones must match exactly.
    CC_LOCKER_TRACK_TT_MTX_LOCK( s_TokenTreeMutex )
    if( components.empty() ) {
      GenerateResultSet( tree, searchText, initialScope, initialResult, caseSense, isPrefix );
    } else
    { GenerateResultSet( tree, searchText, initialScope, initialResult, true, false ); }
    CC_LOCKER_TRACK_TT_MTX_UNLOCK( s_TokenTreeMutex )
    // The matches become the scope for the next component.
    initialScope.clear();
    if( !initialResult.empty() ) {
      bool locked = false;
      for( TokenIdxSet::const_iterator it = initialResult.begin(); it != initialResult.end(); ++it ) {
        const size_t id = ( *it );
        wxString actualTypeStr;
        int parentIndex = -1;
        bool isFuncOrVar = false;
        // Only the UNLOCK below is guarded by the 'if' (the macros
        // presumably expand to a single statement each — confirm); the LOCK
        // and the flag update always execute.
        if( locked )
          CC_LOCKER_TRACK_TT_MTX_UNLOCK( s_TokenTreeMutex )
          CC_LOCKER_TRACK_TT_MTX_LOCK( s_TokenTreeMutex )
          locked = true;
        const Token* token = tree->at( id );
        if( !token ) {
          continue;
        }
        // Operators are only valid after a namespace component.
        if( token->m_IsOperator && ( m_LastComponent.tokenType != pttNamespace ) ) {
          continue;
        }
        // Remember template formal->actual mappings for later substitution.
        if( !token->m_TemplateMap.empty() ) {
          m_TemplateMap = token->m_TemplateMap;
        }
        isFuncOrVar = !searchText.IsEmpty() && ( subComponent.tokenType != pttSearchText ) && !token->m_BaseType.IsEmpty();
        if( isFuncOrVar ) {
          actualTypeStr = token->m_BaseType;
          parentIndex = token->m_Index;
        }
        CC_LOCKER_TRACK_TT_MTX_UNLOCK( s_TokenTreeMutex )
        locked = false;
        // A function/variable contributes its *type* to the next scope, not
        // the token itself.
        if( isFuncOrVar ) {
          TokenIdxSet actualTypeScope;
          if( searchScope.empty() ) {
            actualTypeScope.insert( -1 );
          } else {
            // Widen the type-lookup scope with all enclosing parents.
            CollectSearchScopes( searchScope, actualTypeScope, tree );
            CC_LOCKER_TRACK_TT_MTX_LOCK( s_TokenTreeMutex )
            const Token* currentTokenParent = tree->at( parentIndex );
            while( true ) {
              if( !currentTokenParent )
              { break; }
              actualTypeScope.insert( currentTokenParent->m_Index );
              currentTokenParent = tree->at( currentTokenParent->m_ParentIndex );
            }
            CC_LOCKER_TRACK_TT_MTX_UNLOCK( s_TokenTreeMutex )
          }
          TokenIdxSet actualTypeResult;
          ResolveActualType( tree, actualTypeStr, actualTypeScope, actualTypeResult );
          if( !actualTypeResult.empty() ) {
            for( TokenIdxSet::const_iterator it2 = actualTypeResult.begin();
                 it2 != actualTypeResult.end();
                 ++it2 ) {
              initialScope.insert( *it2 );
              CC_LOCKER_TRACK_TT_MTX_LOCK( s_TokenTreeMutex )
              const Token* typeToken = tree->at( *it2 );
              if( typeToken && !typeToken->m_TemplateMap.empty() )
              { m_TemplateMap = typeToken->m_TemplateMap; }
              CC_LOCKER_TRACK_TT_MTX_UNLOCK( s_TokenTreeMutex )
              // Typedef'd template aliases may add further scopes.
              AddTemplateAlias( tree, *it2, actualTypeScope, initialScope );
            }
          } else
          { ResolveTemplateMap( tree, actualTypeStr, actualTypeScope, initialScope ); }
          continue;
        }
        initialScope.insert( id );
      }
      if( locked )
        CC_LOCKER_TRACK_TT_MTX_UNLOCK( s_TokenTreeMutex )
      } else {
      // Component did not match anything: resolution fails.
      initialScope.clear();
      break;
    }
    // Apply an attached operator ("->", "[]", "()", "*") to the scope set.
    if( subComponent.tokenOperatorType != otOperatorUndefined ) {
      TokenIdxSet operatorResult;
      ResolveOperator( tree, subComponent.tokenOperatorType, initialScope, searchScope, operatorResult );
      if( !operatorResult.empty() ) {
        initialScope = operatorResult;
      }
    }
    if( subComponent.tokenType != pttSearchText ) {
      m_LastComponent = subComponent;
    }
  }
  if( !initialScope.empty() ) {
    AddConstructors( tree, initialScope, result );
  }
  return result.size();
}

// Copy every token index from 'source' into 'dest'; for each class token,
// additionally pull in its publicly accessible constructors and call
// operators whose name ends in "()".
void NativeParserBase::AddConstructors( TokenTree *tree, const TokenIdxSet& source, TokenIdxSet& dest ) {
  for( TokenIdxSet::const_iterator src = source.begin(); src != source.end(); ++src ) {
    const Token* tk = tree->at( *src );
    if( !tk )
      continue;
    dest.insert( *src );
    if( tk->m_TokenKind != tkClass )
      continue;
    // Scan the class children for constructor-like members.
    for( TokenIdxSet::const_iterator ch = tk->m_Children.begin(); ch != tk->m_Children.end(); ++ch ) {
      const Token* child = tree->at( *ch );
      if( !child )
        continue;
      const bool ctorLike = ( child->m_TokenKind == tkConstructor )
                            || ( child->m_IsOperator && child->m_Name.EndsWith( _T( "()" ) ) );
      const bool accessible = ( child->m_Scope == tsPublic ) || ( child->m_Scope == tsUndefined );
      if( ctorLike && accessible )
        dest.insert( *ch );
    }
  }
}

// Given a set of tokens an operator ("()", "[]", "->", "*") is applied to,
// locate the matching operator overloads in those class/typedef tokens and
// resolve the overloads' return types into 'result'.
void NativeParserBase::ResolveOperator( TokenTree* tree,
                                        const OperatorType& tokenOperatorType,
                                        const TokenIdxSet&  tokens,
                                        const TokenIdxSet&  searchScope,
                                        TokenIdxSet& result ) {
  if( !tree || searchScope.empty() )
    return;

  // Only class and typedef tokens can carry operator overloads.
  TokenIdxSet operatorScope;
  CC_LOCKER_TRACK_TT_MTX_LOCK( s_TokenTreeMutex )
  for( TokenIdxSet::const_iterator it = tokens.begin(); it != tokens.end(); ++it ) {
    const Token* tk = tree->at( *it );
    if( tk && ( tk->m_TokenKind == tkClass || tk->m_TokenKind == tkTypedef ) )
      operatorScope.insert( *it );
  }
  CC_LOCKER_TRACK_TT_MTX_UNLOCK( s_TokenTreeMutex )
  if( operatorScope.empty() )
    return;

  // Translate the operator kind into the member name to look up.
  wxString operatorName;
  if( tokenOperatorType == otOperatorParentheses )
    operatorName = _T( "operator()" );
  else if( tokenOperatorType == otOperatorSquare )
    operatorName = _T( "operator[]" );
  else if( tokenOperatorType == otOperatorPointer )
    operatorName = _T( "operator->" );
  else if( tokenOperatorType == otOperatorStar )
    operatorName = _T( "operator*" );
  if( operatorName.IsEmpty() )
    return;

  // Find the overloads inside the candidate class/typedef scopes.
  TokenIdxSet overloads;
  CC_LOCKER_TRACK_TT_MTX_LOCK( s_TokenTreeMutex )
  GenerateResultSet( tree, operatorName, operatorScope, overloads );
  CC_LOCKER_TRACK_TT_MTX_UNLOCK( s_TokenTreeMutex )
  // Widen the scope with the caller's search scopes before type resolution.
  CollectSearchScopes( searchScope, operatorScope, tree );
  if( overloads.empty() )
    return;

  // Resolve each overload's return type into concrete tokens.
  for( TokenIdxSet::const_iterator it = overloads.begin(); it != overloads.end(); ++it ) {
    wxString returnType;
    CC_LOCKER_TRACK_TT_MTX_LOCK( s_TokenTreeMutex )
    const Token* overload = tree->at( ( *it ) );
    if( overload )
      returnType = overload->m_BaseType;
    CC_LOCKER_TRACK_TT_MTX_UNLOCK( s_TokenTreeMutex )
    if( returnType.IsEmpty() )
      continue;
    TokenIdxSet resolved;
    ResolveActualType( tree, returnType, operatorScope, resolved );
    if( resolved.empty() ) {
      // The return type may be a template formal: consult the template map.
      ResolveTemplateMap( tree, returnType, operatorScope, result );
      continue;
    }
    for( TokenIdxSet::const_iterator rt = resolved.begin(); rt != resolved.end(); ++rt ) {
      result.insert( *rt );
      AddTemplateAlias( tree, *rt, operatorScope, result );
    }
  }
}

// Resolve a type string (e.g. "std::map") to token indices by walking each
// of its components through the token tree within 'searchScope'.
// @return the number of matched tokens placed in 'result'.
size_t NativeParserBase::ResolveActualType( TokenTree* tree,
    wxString searchText,
    const TokenIdxSet& searchScope,
    TokenIdxSet& result ) {
  std::queue<ParserComponent> parts;
  BreakUpComponents( searchText, parts );
  if( parts.empty() )
    return result.size();

  // Start from the caller's scope, or the global scope when none was given.
  TokenIdxSet scope;
  if( searchScope.empty() )
    scope.insert( -1 );
  else
    scope = searchScope;

  CC_LOCKER_TRACK_TT_MTX_LOCK( s_TokenTreeMutex )
  while( !parts.empty() ) {
    ParserComponent part = parts.front();
    parts.pop();
    TokenIdxSet partMatches;
    GenerateResultSet( tree, part.component, scope, partMatches, true, false, 0xFFFF );
    // The matches of this component become the scope for the next one; an
    // unmatched component means the whole type cannot be resolved.
    scope.clear();
    if( partMatches.empty() )
      break;
    scope.insert( partMatches.begin(), partMatches.end() );
  }
  CC_LOCKER_TRACK_TT_MTX_UNLOCK( s_TokenTreeMutex )

  if( !scope.empty() )
    result = scope;
  return result.size();
}

// If 'searchStr' names a known template formal parameter, substitute the
// actual argument recorded in m_TemplateMap and resolve that type, adding
// the results to 'initialScope'.
void NativeParserBase::ResolveTemplateMap( TokenTree* tree,
    const wxString& searchStr, const TokenIdxSet& actualTypeScope, TokenIdxSet& initialScope ) {
  if( actualTypeScope.empty() )
    return;
  std::map<wxString, wxString>::const_iterator mapped = m_TemplateMap.find( searchStr );
  if( mapped == m_TemplateMap.end() )
    return;   // not a template formal we know about
  TokenIdxSet resolved;
  ResolveActualType( tree, mapped->second, actualTypeScope, resolved );
  for( TokenIdxSet::const_iterator it = resolved.begin(); it != resolved.end(); ++it )
    initialScope.insert( *it );
}

// If token 'id' is a typedef carrying a template alias, map the alias
// through m_TemplateMap to the actual instantiated type and resolve that
// type into 'initialScope'.
//
// Fix: guard the wxString::Last() calls with IsEmpty() — Last() on an empty
// string is invalid, and the mapped value could be empty.
void NativeParserBase::AddTemplateAlias( TokenTree* tree, const int& id, const TokenIdxSet& actualTypeScope, TokenIdxSet& initialScope ) {
  if( !tree || actualTypeScope.empty() ) {
    return;
  }
  wxString actualTypeStr;
  CC_LOCKER_TRACK_TT_MTX_LOCK( s_TokenTreeMutex )
  const Token* typeToken = tree->at( id );
  if( typeToken &&  typeToken->m_TokenKind == tkTypedef && !typeToken->m_TemplateAlias.IsEmpty() ) {
    actualTypeStr = typeToken->m_TemplateAlias;
  }
  CC_LOCKER_TRACK_TT_MTX_UNLOCK( s_TokenTreeMutex )
  std::map<wxString, wxString>::const_iterator it = m_TemplateMap.find( actualTypeStr );
  if( it != m_TemplateMap.end() ) {
    actualTypeStr = it->second;
    // Strip a trailing reference/pointer marker before resolving the type.
    if( !actualTypeStr.IsEmpty()
        && ( actualTypeStr.Last() == _T( '&' ) || actualTypeStr.Last() == _T( '*' ) ) ) {
      actualTypeStr.RemoveLast();
    }
    TokenIdxSet actualTypeResult;
    ResolveActualType( tree, actualTypeStr, actualTypeScope, actualTypeResult );
    if( !actualTypeResult.empty() ) {
      for( TokenIdxSet::const_iterator it2 = actualTypeResult.begin(); it2 != actualTypeResult.end(); ++it2 ) {
        initialScope.insert( *it2 );
      }
    }
  }
}

// Collect the children of 'parentIdx' — and of all its ancestors — whose
// name (or namespace alias) matches 'target', filtered by 'kindMask'; enum
// children are searched transparently so enumerators surface in the parent
// scope. When 'parentIdx' does not resolve to a token, the tree's top-level
// tokens are scanned instead.
// @return the accumulated size of 'result'.
size_t NativeParserBase::GenerateResultSet( TokenTree* tree, const wxString& target,
    int parentIdx, TokenIdxSet& result, bool caseSens, bool isPrefix, short int kindMask ) {
  Token* parent = tree->at( parentIdx );
  if( !parent ) {
    // No parent token: scan every token that lives at global scope.
    const TokenList* all = tree->GetTokens();
    for( TokenList::const_iterator it = all->begin(); it != all->end(); ++it ) {
      const Token* tk = *it;
      if( !tk || tk->m_ParentIndex != -1 || !MatchType( tk->m_TokenKind, kindMask ) )
        continue;
      if( MatchText( tk->m_Name, target, caseSens, isPrefix ) ) {
        result.insert( tk->m_Index );
      } else if( tk->m_TokenKind == tkNamespace && tk->m_Aliases.size() ) {
        for( size_t i = 0; i < tk->m_Aliases.size(); ++i ) {
          if( MatchText( tk->m_Aliases[i], target, caseSens, isPrefix ) )
            result.insert( tk->m_Index );
        }
      } else if( tk->m_TokenKind == tkEnum ) {
        // Enumerators are visible in the enum's enclosing scope.
        GenerateResultSet( tree, target, tk->m_Index, result, caseSens, isPrefix, kindMask );
      }
    }
    return result.size();
  }

  // Direct children of the parent.
  for( TokenIdxSet::const_iterator it = parent->m_Children.begin(); it != parent->m_Children.end(); ++it ) {
    const Token* tk = tree->at( *it );
    if( !tk || !MatchType( tk->m_TokenKind, kindMask ) )
      continue;
    if( MatchText( tk->m_Name, target, caseSens, isPrefix ) ) {
      result.insert( *it );
    } else if( tk->m_TokenKind == tkNamespace && tk->m_Aliases.size() ) {
      for( size_t i = 0; i < tk->m_Aliases.size(); ++i ) {
        if( MatchText( tk->m_Aliases[i], target, caseSens, isPrefix ) )
          result.insert( *it );
      }
    } else if( tk->m_TokenKind == tkEnum ) {
      GenerateResultSet( tree, target, *it, result, caseSens, isPrefix, kindMask );
    }
  }

  // Inherited children: refresh the ancestor list, then scan each ancestor.
  tree->RecalcInheritanceChain( parent );
  for( TokenIdxSet::const_iterator anc = parent->m_Ancestors.begin(); anc != parent->m_Ancestors.end(); ++anc ) {
    const Token* ancestor = tree->at( *anc );
    if( !ancestor )
      continue;
    for( TokenIdxSet::const_iterator it = ancestor->m_Children.begin(); it != ancestor->m_Children.end(); ++it ) {
      const Token* tk = tree->at( *it );
      if( !tk || !MatchType( tk->m_TokenKind, kindMask ) )
        continue;
      if( MatchText( tk->m_Name, target, caseSens, isPrefix ) ) {
        result.insert( *it );
      } else if( tk->m_TokenKind == tkNamespace && tk->m_Aliases.size() ) {
        for( size_t i = 0; i < tk->m_Aliases.size(); ++i ) {
          if( MatchText( tk->m_Aliases[i], target, caseSens, isPrefix ) )
            result.insert( *it );
        }
      } else if( tk->m_TokenKind == tkEnum ) {
        GenerateResultSet( tree, target, *it, result, caseSens, isPrefix, kindMask );
      }
    }
  }
  return result.size();
}

// Collect tokens matching 'target' that are visible from any scope in
// 'parentSet' (including inherited members, enumerators and members of
// unnamed aggregates). An empty 'target' enumerates everything visible in
// those scopes. 'kindMask' is accepted for interface symmetry but unused
// here (see the M_Code_unused annotation).
// @return the accumulated size of 'result'.
size_t NativeParserBase::GenerateResultSet( TokenTree* tree, const wxString& target,
    const TokenIdxSet&  parentSet, TokenIdxSet& result, bool caseSens, bool isPrefix, M_Code_unused short int kindMask ) {
  if( !tree ) {
    return 0;
  }
  if( target.IsEmpty() ) {
    // Empty target: return every child (and inherited child) of each scope.
    for( TokenIdxSet::const_iterator ptr = parentSet.begin(); ptr != parentSet.end(); ++ptr ) {
      size_t parentIdx = ( *ptr );
      Token* parent = tree->at( parentIdx );
      if( !parent ) {
        continue;
      }
      for( TokenIdxSet::const_iterator it = parent->m_Children.begin(); it != parent->m_Children.end(); ++it ) {
        const Token* token = tree->at( *it );
        if( !token ) {
          continue;
        }
        // Unnamed aggregates contribute their members instead of themselves;
        // enums contribute both themselves and their enumerators.
        if( !AddChildrenOfUnnamed( tree, token, result ) ) {
          result.insert( *it );
          AddChildrenOfEnum( tree, token, result );
        }
      }
      // Same treatment for members inherited from each ancestor.
      tree->RecalcInheritanceChain( parent );
      for( TokenIdxSet::const_iterator it = parent->m_Ancestors.begin(); it != parent->m_Ancestors.end(); ++it ) {
        const Token* ancestor = tree->at( *it );
        if( !ancestor ) {
          continue;
        }
        for( TokenIdxSet::const_iterator it2 = ancestor->m_Children.begin(); it2 != ancestor->m_Children.end(); ++it2 ) {
          const Token* token = tree->at( *it2 );
          if( !token ) {
            continue;
          }
          if( !AddChildrenOfUnnamed( tree, token, result ) ) {
            result.insert( *it2 );
            AddChildrenOfEnum( tree, token, result );
          }
        }
      }
    }
  } else {
    // Non-empty target: first gather all tokens whose name matches the text,
    // then keep those reachable from one of the requested scopes.
    TokenIdxSet textMatchSet, tmpMatches;
    if( tree->FindMatches( target, tmpMatches, caseSens, isPrefix ) ) {
      TokenIdxSet::const_iterator it;
      for( it = tmpMatches.begin(); it != tmpMatches.end(); ++it ) {
        const Token* token = tree->at( *it );
        if( token ) {
          textMatchSet.insert( *it );
        }
      }
    }
    if( !textMatchSet.empty() ) {
      for( TokenIdxSet::const_iterator parentIterator = parentSet.begin();
           parentIterator != parentSet.end();
           ++parentIterator ) {
        int parentIdx = ( *parentIterator );
        for( TokenIdxSet::const_iterator it = textMatchSet.begin(); it != textMatchSet.end(); ++it ) {
          const Token* token = tree->at( *it );
          // Direct child of the scope, or child of an unnamed/enum member.
          if( token && ( ( token->m_ParentIndex == parentIdx ) || IsChildOfUnnamedOrEnum( tree, token->m_ParentIndex, parentIdx ) ) ) {
            result.insert( *it );
          }
          if( parentIdx != -1 ) {
            // Also accept members inherited from the scope's ancestors.
            Token* tokenParent = tree->at( parentIdx );
            if( tokenParent ) {
              tree->RecalcInheritanceChain( tokenParent );
              for( TokenIdxSet::const_iterator ancestorIterator = tokenParent->m_Ancestors.begin();
                   ancestorIterator != tokenParent->m_Ancestors.end(); ++ancestorIterator ) {
                if( token && ( ( token->m_ParentIndex == ( *ancestorIterator ) )
                               || IsChildOfUnnamedOrEnum( tree, token->m_ParentIndex, ( *ancestorIterator ) ) ) )
                { result.insert( *it ); }
              }
            }
          } else if( -1 == parentIdx ) {
            // Global scope: enumerators of a global enum are also visible.
            const Token* parentToken = tree->at( token->m_ParentIndex );
            if( parentToken && parentToken->m_TokenKind == tkEnum )
            { result.insert( *it ); }
          }
          // Special case for STL allocator-derived internals.
          if( token && IsAllocator( tree, token->m_ParentIndex ) && DependsOnAllocator( tree, parentIdx ) ) {
            result.insert( *it );
          }
        }
      }
    } else {
      // No direct name match: fall back to global namespace aliases.
      if( parentSet.count( -1 ) ) {
        const TokenList* tl = tree->GetTokens();
        for( TokenList::const_iterator it = tl->begin(); it != tl->end(); ++it ) {
          const Token* token = ( *it );
          if( token && token->m_TokenKind == tkNamespace && token->m_Aliases.size() ) {
            for( size_t i = 0; i < token->m_Aliases.size(); ++i ) {
              if( token->m_Aliases[i] == target )
              { result.insert( token->m_Index ); }
            }
          }
        }
      }
    }
  }
  return result.size();
}

// True when the token with index 'id' is named exactly "allocator"
// (used to special-case STL allocator internals).
bool NativeParserBase::IsAllocator( TokenTree* tree, const int& id ) {
  if( !tree )
    return false;
  const Token* tk = tree->at( id );
  if( !tk )
    return false;
  return tk->m_Name.IsSameAs( _T( "allocator" ) );
}

// Walk up the parent chain starting at 'id' and report whether any enclosing
// token's template argument mentions "_Alloc" or "_Sequence" (markers of
// libstdc++ allocator-based container internals).
bool NativeParserBase::DependsOnAllocator( TokenTree* tree, const int& id ) {
  if( !tree )
    return false;
  const Token* tk = tree->at( id );
  if( !tk )
    return false;
  const bool mentionsAllocator =
    ( tk->m_TemplateArgument.Find( _T( "_Alloc" ) ) != wxNOT_FOUND )
    || ( tk->m_TemplateArgument.Find( _T( "_Sequence" ) ) != wxNOT_FOUND );
  if( mentionsAllocator )
    return true;
  // Recurse into the enclosing scope.
  return DependsOnAllocator( tree, tk->m_ParentIndex );
}

void NativeParserBase::CollectSearchScopes( const TokenIdxSet& searchScope, TokenIdxSet& actualTypeScope, TokenTree* tree ) {
  CC_LOCKER_TRACK_TT_MTX_LOCK( s_TokenTreeMutex )
  for( TokenIdxSet::const_iterator pScope = searchScope.begin(); pScope != searchScope.end(); ++pScope ) {
    actualTypeScope.insert( *pScope );
    if( ( *pScope ) != -1 ) {
      const Token* token = tree->at( *pScope );
      if( !token ) {
        continue;
      }
      const Token* parent = tree->at( token->m_ParentIndex );
      while( true ) {
        if( !parent ) {
          break;
        }
        actualTypeScope.insert( parent->m_Index );
        parent = tree->at( parent->m_ParentIndex );
      }
    }
  }
  CC_LOCKER_TRACK_TT_MTX_UNLOCK( s_TokenTreeMutex )
}

// Picks from `tokens` the token whose implementation span contains
// `curLine` in `file`: a function/constructor match stops the scan,
// but a class whose span contains the line (seen at any point before
// the scan stops) takes final precedence — matching the original
// ordering-sensitive behavior. Returns the token index, or -1.
int NativeParserBase::GetTokenFromCurrentLine( TokenTree* tree, const TokenIdxSet& tokens, size_t curLine, const wxString& file ) {
  int matchIdx = -1;
  if( !tree ) {
    return matchIdx;
  }
  const size_t fileIdx = tree->InsertFileOrGetIndex( file );
  const Token* enclosingClass = nullptr;
  bool haveFunction = false;
  for( TokenIdxSet::const_iterator tokIt = tokens.begin(); tokIt != tokens.end(); ++tokIt ) {
    const Token* candidate = tree->at( *tokIt );
    if( !candidate ) {
      continue;
    }
    const bool inImplFile = ( candidate->m_ImplFileIdx == fileIdx );
    if( ( candidate->m_TokenKind & tkAnyFunction ) && inImplFile
        && candidate->m_ImplLine <= curLine && candidate->m_ImplLineEnd >= curLine ) {
      matchIdx = candidate->m_Index;
      haveFunction = true;
    } else if( candidate->m_TokenKind == tkConstructor && inImplFile
               && candidate->m_ImplLine <= curLine && candidate->m_ImplLineStart >= curLine ) {
      matchIdx = candidate->m_Index;
      haveFunction = true;
    } else if( candidate->m_TokenKind == tkClass
               && candidate->m_ImplLineStart <= curLine && candidate->m_ImplLineEnd >= curLine ) {
      // Remember the class but keep scanning; a later function hit may
      // still be overridden by this class below.
      enclosingClass = candidate;
      continue;
    }
    if( haveFunction ) {
      break;
    }
  }
  if( enclosingClass ) {
    matchIdx = enclosingClass->m_Index;
  }
  return matchIdx;
}

// Builds one pretty-printed call tip per token in `tokens`, appending each
// to `items`. Variables are resolved to their class type, classes expand to
// their public constructors/call-operators, and argument-less macro
// definitions are chased to the function they alias. Runs under the
// token-tree mutex.
void NativeParserBase::ComputeCallTip( TokenTree* tree, const TokenIdxSet& tokens, wxArrayString& items ) {
  CC_LOCKER_TRACK_TT_MTX_LOCK( s_TokenTreeMutex )
  for( TokenIdxSet::const_iterator it = tokens.begin(); it != tokens.end(); ++it ) {
    const Token* token = tree->at( *it );
    if( !token ) {
      continue;
    }
    if( token->m_TokenKind == tkVariable ) {
      // A variable is not callable itself: re-bind `token` to the first
      // class whose name matches the variable's base type, so the class
      // branch below can list its constructors.
      TokenIdxSet classes;
      tree->FindMatches( token->m_BaseType, classes, true, false, tkClass );
      for( TokenIdxSet::const_iterator clIt = classes.begin(); clIt != classes.end(); ++clIt ) {
        const Token* tk = tree->at( *clIt );
        if( tk ) {
          token = tk;
          break;
        }
      }
    }
    if( token->m_TokenKind == tkClass ) {
      // For a class, emit a tip per callable child: constructors, plus
      // operator() overloads; only public (or unspecified-scope) ones.
      for( TokenIdxSet::iterator chIt = token->m_Children.begin();
           chIt != token->m_Children.end();
           ++chIt ) {
        const Token* tk = tree->at( *chIt );
        if( tk && ( tk->m_TokenKind == tkConstructor || ( tk->m_IsOperator && tk->m_Name.EndsWith( _T( "()" ) ) ) )
            && ( tk->m_Scope == tsPublic || tk->m_Scope == tsUndefined ) ) {
          wxString tkTip;
          if( PrettyPrintToken( tree, tk, tkTip ) ) {
            items.Add( tkTip );
          }
        }
      }
      continue;
    }
    if( token->m_TokenKind == tkMacroDef && token->m_Args.empty() ) {
      // An argument-less #define may alias a callable: first try the
      // macro's full type verbatim as a token name.
      const Token* tk = tree->at( tree->TokenExists( token->m_FullType, -1, tkFunction | tkMacroDef | tkVariable ) );
      // XOR trick: nonzero when the found token's kind differs from
      // tkMacroDef — i.e. accept it unless it is another bare macro.
      if( tk && ( ( tk->m_TokenKind ^ tkMacroDef ) || !tk->m_Args.empty() ) ) {
        token = tk;
      } else {
        // Fall back to lexing the macro body and looking up its first
        // token — but only if that first token is the *whole* body
        // (PeekToken() must come back empty).
        Tokenizer smallTokenizer( tree );
        smallTokenizer.InitFromBuffer( token->m_FullType + _T( '\n' ) );
        tk = tree->at( tree->TokenExists( smallTokenizer.GetToken(), -1, tkFunction | tkMacroDef | tkVariable ) );
        if( tk && smallTokenizer.PeekToken().empty() ) {
          token = tk;
        }
      }
    }
    wxString tkTip;
    if( !PrettyPrintToken( tree, token, tkTip ) ) {
      tkTip = _T( "Error while pretty printing token!" );
    }
    items.Add( tkTip );
  }
  CC_LOCKER_TRACK_TT_MTX_UNLOCK( s_TokenTreeMutex )
}

// Appends a human-readable signature for `token` to `result`, recursing
// through the parent chain so scopes print as "Outer::Inner::name".
// `isRoot` is false on recursive calls so intermediate scopes get a
// trailing "::". Returns false only when a parent scope fails to print.
bool NativeParserBase::PrettyPrintToken( TokenTree* tree, const Token* token, wxString& result, bool isRoot ) {
  wxString displayName = token->m_Name;
  if( token->m_TokenKind == tkVariable ) {
    // A variable whose base type resolves to a typedef with arguments
    // (e.g. a function-pointer typedef) is printed through the typedef,
    // keeping the variable's own name.
    const Token* typedefTk = tree->at( tree->TokenExists( token->m_BaseType, token->m_ParentIndex, tkTypedef ) );
    if( !typedefTk && token->m_ParentIndex != -1 ) {
      // Not found in the parent scope: retry at global scope.
      typedefTk = tree->at( tree->TokenExists( token->m_BaseType, -1, tkTypedef ) );
    }
    if( typedefTk && !typedefTk->m_Args.empty() ) {
      displayName = token->m_Name;
      token = typedefTk;
    }
  }
  // Print enclosing scopes first (containers and functions only).
  if( ( token->m_ParentIndex != -1 ) && ( token->m_TokenKind & ( tkAnyContainer | tkAnyFunction ) ) ) {
    const Token* enclosing = tree->at( token->m_ParentIndex );
    if( !enclosing || !PrettyPrintToken( tree, enclosing, result, false ) ) {
      return false;
    }
  }
  if( token->m_TokenKind == tkConstructor ) {
    // Constructors carry no return type: just scope + name + args.
    result = result + token->m_Name + token->GetFormattedArgs();
  } else if( token->m_TokenKind == tkFunction ) {
    result = token->m_FullType + _T( " " ) + result + token->m_Name + token->GetFormattedArgs();
    if( token->m_IsConst ) {
      result += _T( " const" );
    }
    if( token->m_IsNoExcept ) {
      result += _T( " noexcept" );
    }
  } else if( token->m_TokenKind == tkClass || token->m_TokenKind == tkNamespace ) {
    if( isRoot ) {
      result += token->m_Name;
    } else {
      result += token->m_Name + _T( "::" );
    }
  } else if( token->m_TokenKind == tkMacroDef ) {
    // Function-like macro: show the #define form. Argument-less macros
    // leave `result` untouched.
    if( !token->GetFormattedArgs().IsEmpty() ) {
      result = _T( "#define " ) + token->m_Name + token->GetFormattedArgs();
    }
  } else if( token->m_TokenKind == tkTypedef ) {
    // `displayName` is the original variable's name when we substituted
    // a typedef above; otherwise it is this token's own name.
    result = token->m_BaseType + _T( " " ) + result + displayName + token->GetFormattedArgs();
  }
  // All other kinds (enums, destructors, plain variables, ...) print
  // nothing extra but are not treated as failures.
  return true;
}
