#ifndef LEX_H_
#define LEX_H_

#include "common.h"
#include <limits.h>     // a pox on C++'s limits framework!

//#include <functional>
#include <map>
#include <memory>
#include <new>
#include <vector>

/* TToken - a token type.
 *
 * Tokens need to be relatively small and cheap since we will be storing
 * them all in an array (per file) that stays in memory until blacc exits.
 *
 * Text is a non-owning pointer into the loaded file's character buffer
 * (owned by LexFile::FileText), so a token is only valid while its file
 * remains loaded.
 ******************************************************************************/
struct  TToken
    {
    const char*     Text;           // raw pointer into original file text
    unsigned short  TextLen;        // # of bytes in this token (no NUL termination!)
    short           Type;           // token type, as defined in Lex

    enum{ MAXLEN = SHRT_MAX };      // maximum length for any token
                                    // (SHRT_MAX even though TextLen is unsigned
                                    // short -- presumably a deliberate safety
                                    // margin; confirm before raising to USHRT_MAX)

    static TToken   Null;           // sentinel "no token" value
    int             IsNull();       // nonzero when this is the Null sentinel --
                                    // presumably; confirm against the .cpp
    unique_ptr<char>Unquote() const;// NOTE(review): unique_ptr<char>'s default
                                    // deleter calls 'delete', not 'delete[]'.
                                    // If Unquote() allocates with new[], this
                                    // should be unique_ptr<char[]> -- verify
                                    // the definition in the .cpp.
    };



typedef std::vector<TToken>                 TTokens;
typedef std::vector<TToken>::const_iterator TTokenIter;
typedef unique_ptr<std::vector<char>>       TFileChars;
//class   LexFile;
class   Lex;


// FileLoad(): lowest-level function for loading a file.
unique_ptr<std::vector<char>> FileLoad(const char* Filename);

// LexFile: a file that will be loaded and tokenized.
//
// Owns the raw file text (FileText) and the token array built from it.
// Tokens hold raw pointers into FileText, so the buffer must outlive the
// token array -- which is why instances are non-copyable and only Lex
// (which keeps them alive in its Files map) may construct them.
class LexFile
    {
public:
    friend class    Lex;
    friend class    Lexer;

    // Scan FileText into Tokens.  Returns an int status -- presumably the
    // error count (see ErrorCount); confirm against the .cpp.
    int             Tokenize();

    // Unchecked token access, vector-style (no bounds checking).
    TToken          operator[](int Index) const { return Tokens[Index]; }

    // Return the token array by const reference.  (The previous by-value
    // return copied the entire vector on every call.)
    const TTokens&  GetTokens() const           { return Tokens; }

    TTokenIter      Begin() const               { return Tokens.begin(); }
    TTokenIter      End() const                 { return Tokens.end(); }
    ~LexFile();

private:
    LexFile(const char* Filename, TFileChars Text, TToken FromInclude);

    // Non-copyable: Tokens alias FileText, so a shallow copy would dangle
    // and a deep copy is never wanted.  C++11 '= delete' replaces the old
    // declare-but-never-define suppression idiom.
    LexFile(const LexFile& Other)            = delete;
    LexFile& operator=(const LexFile& Other) = delete;

    std::string         Filename;               // name of file
    TFileChars          FileText;               // must live as long as Tokens
    TTokens             Tokens;                 // array of tokens (result of tokenizing FileText)
    int                 ErrorCount;             // # of error tokens in Tokens
    TToken              FromInclude;            // parent file %include token
                                                // (TToken::Null when this is the root file)
    };
// TTokenizedFile: non-owning handle to a tokenized file.  The LexFile itself
// is owned by Lex::Files, so the handle stays valid for the Lex's lifetime.
using TTokenizedFile = LexFile*;


// Lexer: drives tokenization starting from a root file.
// Declaration only -- the constructor and destructor are defined in the
// .cpp, and the class currently carries no data members.
class Lexer
    {
public:
    Lexer(LexFile& RootFile);
    ~Lexer();
    };


// Lex: top-level lexer registry.
//
// Owns every LexFile ever loaded (Files maps filename -> owning pointer),
// so the raw TTokenizedFile handles returned by FileLoad()/Loaded() remain
// valid for the lifetime of the Lex instance.
class Lex
    {
public:
    Lex();
   ~Lex();
    // Load and tokenize Filename.  FromInclude is the %include token of the
    // parent file (presumably TToken::Null for the root -- confirm in .cpp).
    // Note: this member shadows the free function ::FileLoad declared above.
    TTokenizedFile  FileLoad(const char* Filename, TToken FromInclude);
    // Look up an already-loaded file by name; presumably returns nullptr
    // when it has not been loaded -- confirm against the .cpp.
    TTokenizedFile  Loaded(const char* Filename);
    // Token types, stored in TToken::Type.
    // ORDER MATTERS: every value greater than NOTUSED is treated as an
    // illegal/error token (see comment at NOTUSED), so new legal types must
    // be added before NOTUSED and new error types after it.
    enum
        {
        TKEOF,              // End-Of-File
        IDENT,              // [A-Za-z][A-Za-z0-9]+
        ACTION,             // { ... }
        SECTION,            // %%
        QUOTED,             // a quoted string
        LEFT,               // %left
        RIGHT,              // %right
        NONASSOC,           // %nonassoc
        TOKEN,              // %token
        NEWLINE,            // '\n'
        OPERAND,
        TEST,               // %test
        LINE,
        START,
        CODE,
        PROGRAM,
        NONTERM,            // IDENT followed by ':'
        MATCHANY,
        NOACT,
        COMMENT,
        WHITESPACE,
        OPERATOR,
        ORBAR,              // presumably '|' (alternation) -- confirm in .cpp
        SEMICOLON,          // presumably ';' -- confirm in .cpp
        NOTUSED,            // useful for marking a token as not yet defined
                            // numbers > NOTUSED are all illegal tokens
        TOOLONG,            // token was too long
        ILLEGAL,            // didn't match any token type
        UNKDIR,             // unknown directive
        BADQUOTE,           // unterminated quote
        INCLUDE,            // %include
        };
private:
    // Filename -> owning LexFile.  unique_ptr keeps each LexFile's address
    // (and therefore every handed-out TTokenizedFile) stable across rehashes.
    std::map<std::string, std::unique_ptr<LexFile> > Files;
    };

#endif
