// -*- mode: c++ -*-
#ifndef TOKEN_H_
#define TOKEN_H_

/* token.h - interface to tokens and tokenizing.
 *
 */

#include "common.h"
#include <limits.h>     // a pox on C++'s limits framework!

//#include <memory>
//#include <new>
#include <vector>

struct  TToken
    {
    // One lexical token: a non-owning view into the original file text plus
    // a type code.  The backing buffer must outlive the token — presumably
    // kept alive by TTokens' per-file data; TODO confirm.
    const char*     Text;               // raw pointer into original file text
    unsigned short  TextLen;            // # of bytes in this token (no NUL termination!)
    short           Type;               // token type, as enumerated below

#if 0
    // NOTE(review): dead code — "std:initializer_list" is missing a ':' and a
    // template argument, and the body is unfinished; fix before re-enabling.
    bool    operator==(std:initializer_list Compare)
        {
//            for(
        }
#endif
    enum{ MAXLEN = SHRT_MAX };          // maximum length for any token

    static TToken   Null;               // A TToken value representing no value at all
    bool            IsNull();           // returns true if this token equals TToken::Null
    unique_ptr<char>Unquote() const;    // return copy of token text NUL terminated and de-quoted
    string          Printable() const;  // printable rendition of the token text (defined elsewhere)
    operator string() { return string(Text, TextLen); }  // copy the raw bytes into a string
    // Token type codes, implicitly numbered from 0.
    // NOTE(review): the original decade markers drifted from "// 10" on
    // (TKEOF..NEWLINE is eleven values, so NEWLINE == 10); the markers
    // below have been corrected to the actual values.
    enum
        {
        // 0
        TKEOF,              // End-Of-File
        IDENT,              // [A-Za-z][A-Za-z0-9]+
        DOTIDENT,           // .[A-Za-z][A-Za-z0-9]+
        ACTION,             // { ... }
        SECTION,            // %%
        QUOTED,             // a quoted string
        LEFT,               // %left
        RIGHT,              // %right
        NONASSOC,           // %nonassoc
        TOKEN,              // %token
        NEWLINE,            // '\n'  (value 10)
        // 11
        OPERAND,            // operand blob argument for precedence decl
        TEST,               // %test
        LINE,               // presumably a %line-style directive — TODO confirm
        START,              // %start - define start symbol(s)
        CODE,               // %{...%}
        PROGRAM,
        MATCHANY,
        NOACT,
        COMMENT,            // //-style comment
        MCOMMENT,           // /*-style (possibly multi-line) comment
        // 21
        WHITESPACE,         // does not include newline
        OPERATOR,
        ORBAR,              // '|'
        COLON,              // ':'
        SEMICOLON,          // ';'
        PUSH,               // pseudo-token: push into sub-lexer
        POP,                // pseudo-token: pop back to previous lexer
        NOTUSED,            // useful for marking a token as not yet defined
                            // numbers > NOTUSED are all illegal tokens
        TOOLONG,            // token was too long
        ILLEGAL,            // didn't match any token type
        UNKDIR,             // unknown directive
        BADQUOTE,           // unterminated quote
        // 33
        BADCOMMENT,         // unterminated /*-style comment
        BADCODE,            // missing %} on end
        INCLUDE,            // %include

        };
    
    };

struct  TTokenIndex
    {
    // Thin wrapper around an int index into the token stream.
    // Default-constructed value is -1, meaning "no index".
    TTokenIndex(int Value)  : Index(Value){}
    TTokenIndex()           : Index(-1){}

    // const so that const TTokenIndex objects (e.g. members of a const
    // TFileTokens) can still convert to int.
    operator int() const            { return Index; }
    TTokenIndex& operator ++()      { ++Index; return *this; }
    // Canonical post-increment: return the pre-increment value as a
    // TTokenIndex (callers that assigned the old int result still work via
    // the implicit conversion above).
    TTokenIndex  operator ++(int)   { TTokenIndex Result(Index); ++Index; return Result; }
    TTokenIndex& operator --()      { --Index; return *this; }
    int          Index;             // the wrapped index; -1 == "none"
    };
// Difference of two token indices, yielding a new TTokenIndex.
inline TTokenIndex operator-(TTokenIndex Lhs, TTokenIndex Rhs)
    {
    int Difference = (int)Lhs - (int)Rhs;
    return TTokenIndex(Difference);
    }
// Offset a token index backwards by a plain int amount.
inline TTokenIndex operator-(TTokenIndex Lhs, int Amount)
    {
    int Difference = (int)Lhs - Amount;
    return TTokenIndex(Difference);
    }



struct TFileTokens
    {
    // The span of tokens [Begin, End) produced from a single source file.
    string          Name;                   // file name as handed to TTokens::Tokenize()
    TTokenIndex     Begin, End, Include;    // Include: presumably the token id of the %include that pulled this file in — TODO confirm
    };

struct TLineTokens : public TFileTokens
    {
    // A file token span narrowed to one source line.
    int             LineNumber;     // 1-based line number? — TODO confirm against GetLine()
    TTokenIndex     Start, Stop;    // token span [Start, Stop) for this line — TODO confirm inclusivity
    };

class   TTokenSlice
    {
    friend class TTokens;
public:
    vector<TToken>::iterator begin() { return Tokens->begin()+Begin; }
    vector<TToken>::iterator end()   { return Tokens->begin()+End;   }
private:
    TTokenSlice(TTokenIndex Begin_, TTokenIndex End_, vector<TToken>&Tokens_)
        : Begin(Begin_), End(End_), Tokens(&Tokens_){};//{printf("begin=%d,end=%d\n", Begin, End);};
    TTokenIndex     Begin;
    TTokenIndex     End;
    vector<TToken>* Tokens;
    };

typedef std::vector<TToken>::iterator TTokenIter;

class   TTokens
    {
    // Holds the token stream for one or more tokenized files.  All tokens
    // live in a single vector; files are recorded as index spans into it.
    // Usage: Tokenize() one or more files, Freeze(), then take Slice()s.
public:
    void            Tokenize(string Filename, int IncludeTokenId);  // append Filename's tokens (defined elsewhere)
    void            Freeze() { TokensFrozen = true; }   // no further tokenizing; Slice() asserts this
    TFileTokens     GetFile(string Filename);           // file span by name (defined elsewhere)
    TFileTokens     GetFile(int TokenId);               // file span containing token TokenId (defined elsewhere)
    TToken          Get(int Offset) { return Tokens[Offset]; }  // token by absolute index; no bounds check
    int             End() { return Tokens.size(); }     // one past the last valid token index
    TLineTokens     GetLine(int TokenId);               // line span containing token TokenId (defined elsewhere)
    TTokenSlice     Slice(TTokenIndex Begin=0, TTokenIndex End=-1)
        {
        // End < 0 (the default) means "through the last token".
        assert(TokensFrozen);
        if(End<0)End=Tokens.size();
        return TTokenSlice(Begin,End,Tokens);
        }
    TTokenSlice     Slice(const TFileTokens& FileTokens)
        { return Slice(FileTokens.Begin, FileTokens.End); };
    TTokenSlice     Slice(nullptr_t) { return Slice(Files[0]->Name); };  // slice of the first tokenized file
    TTokenSlice     Slice(string Filename);             // slice of a file by name (defined elsewhere)
    // NOTE(review): size()-1 wraps when no tokens exist — presumably never
    // called on an empty stream; confirm before relying on it.
    TTokenIndex     LastTokenId() { return Tokens.size()-1; }
private:
    struct TTokenizedFile : public TFileTokens
        {
        // A file's token span plus ownership of its raw text; TToken::Text
        // presumably points into Data — TODO confirm cache_ptr semantics.
        TTokenizedFile(cache_ptr Data_) : Data(std::move(Data_)) {};
        cache_ptr       Data;
        };
    bool                TokensFrozen  = false;  // set by Freeze(); guards Slice()
    vector<TToken>      Tokens;                 // every token from every file, in order
    vector<unique_ptr<TTokenizedFile>>  Files;  // per-file spans + owned file data
    };


#endif /* TOKEN_H_ */
