/*-------------------------------------------------------------------------
 - Copyright (c) 2024-2025 [XD-AMCC TEAM]
 - [XD-AMCC] is licensed under Mulan PSL v2.
 - You can use this software according to the terms and conditions of the Mulan PSL v2.
 - You may obtain a copy of Mulan PSL v2 at:
 -          http://license.coscl.org.cn/MulanPSL2
 - THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
 - OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
 - TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
 - See the Mulan PSL v2 for more details.
 ------------------------------------------------------------------------*/


/**
 * @brief Implement a token-pool and a keywords table.
 *
 * @author  WXQ#XDU
 * @date    2024.11
 */

#include <stdio.h>   // for snprintf()
#include <stdlib.h>  // for bsearch()
#include <string.h>

#include "_lex_inner.h"

#define  TKNAME_UNKNOWN  "UNKNOWN"

// Descriptor for one token kind. The tokenKindInfos[] table below holds
// one of these per kind, sorted ascending by `kind` so bsearch() works.
typedef struct token_kind_info {
    EnumTokenKind  kind;      // the enum value this entry describes
    const char*    name;      // display name, e.g. "EOF", "INT_LITERAL"
    const char*    literal;   // sample/canonical token text
    bool           unSupport; // false means support it
} t_token_kind_info;


// All tokens kind name info.
// Order of elements is same as EnumTokenKind, in ascending order.
// This ordering is REQUIRED: query_tokenKind_info() runs bsearch() on it.
// Entries written here omit the 4th field (unSupport), so static
// initialization zeroes it to false (= supported); the generated .inc
// files may set it explicitly.
static t_token_kind_info tokenKindInfos [] = {
      { TK_UNSUPPORT,    "UNSUPPORT"   , "<UNSUPPORT>"   }
    , { TK_ERROR    ,    "BAD_INPUT"   , "<BAD_INPUT>"   }
    , { TK_EOF      ,    "EOF"         , "<EOF>"         }
    , { TK_EPSILON  ,    EPSILON_TEXT  , "<" EPSILON_TEXT ">" }
    , { TK_INT_LITERAL , "INT_LITERAL" , "int-literal"   }

#include "../../meta/kw_token_names.inc"   //generated by my tool
#include "../../meta/dfa_token_names.inc"  //generated by my tool

    , {TK_UNKNOWN,  TKNAME_UNKNOWN, TKNAME_UNKNOWN}
};

// comparer for bsearch(): orders entries by their token kind.
// Uses the (a > b) - (a < b) idiom instead of subtraction so the
// result can never overflow, whatever the underlying enum values are.
static int comp_terminal_ (const void * p1, const void * p2) {
    const t_token_kind_info * pLeft  = (const t_token_kind_info *)p1;
    const t_token_kind_info * pRight = (const t_token_kind_info *)p2;

    return (pLeft->kind > pRight->kind) - (pLeft->kind < pRight->kind);
}

// Binary-search the sorted tokenKindInfos table for a given kind.
// Returns NULL when the kind has no entry in the table.
static t_token_kind_info * query_tokenKind_info (EnumTokenKind tokenKind) {
    const size_t count = sizeof(tokenKindInfos) / sizeof(tokenKindInfos[0]);
    t_token_kind_info probe = {tokenKind, NULL};
    void * found = bsearch(&probe, tokenKindInfos, count,
                           sizeof(tokenKindInfos[0]), comp_terminal_);
    return (t_token_kind_info *)found;
}


/**
 * @brief Return the display name for a token kind.
 *
 * For kinds present in tokenKindInfos[], the table's name is returned.
 * For anything else a diagnostic string "UNKNOWN(<n>)" is formatted into
 * a static buffer.
 *
 * NOTE: the fallback uses a static buffer, so it is NOT thread-safe and
 * the returned pointer is overwritten by the next unknown-kind call.
 */
const char * token_nameOfKind (EnumTokenKind tokenKind) {
    t_token_kind_info* pItem = query_tokenKind_info(tokenKind);
    if (NULL != pItem) {
        return pItem->name;
    }

    static char forever_buffer[50];
    // snprintf() already reserves room for the terminating NUL, so the
    // full buffer size is correct (the previous sizeof-1 wasted a byte).
    // Cast the enum to int to match the "%d" specifier exactly.
    snprintf(forever_buffer, sizeof(forever_buffer),
             TKNAME_UNKNOWN "(%d)", (int)tokenKind);
    return forever_buffer;
}

// Return a sample/canonical literal text for a token kind,
// or a placeholder string when the kind is undefined.
const char * token_sampleOfKind (EnumTokenKind tokenKind) {
    const t_token_kind_info* info = query_tokenKind_info(tokenKind);
    if (NULL == info) {
        return "<" TKNAME_UNKNOWN ">"; // not 0 : prevent AMCC from CRASHing
    }
    return info->literal;
}

// Report whether a token kind is flagged as unsupported.
// An undefined kind is treated as supported (returns false).
bool token_notSupport (EnumTokenKind tokenKind) {
    const t_token_kind_info* info = query_tokenKind_info(tokenKind);
    return (NULL == info) ? false : info->unSupport;
}

#include "amcc_hashmap.h"

//------------------------------------------------------------------------
// A simple symbol table for storing all AMC and C keywords.
//
// Each element is a pair of <TEXT, TOKEN_BODY> for a keyword.
//

// Key-words table, the key is token-text.
// Lazily created by keywords_init_(); NULL until then and after destroy.
static t_hashmap_ptr  gKeywordsPtr;

// Look up a keyword's shared token-body by its exact text.
// Returns NULL when tokText is not a registered keyword.
t_token_body * keywords_find_kind (const char* tokText) {
    return (t_token_body *)hashmap_find(gKeywordsPtr, (t_map_key)tokText);
}

// Hash adapter: map keys are NUL-terminated C strings.
static t_hashcode pool_hash_ (t_map_key K) {
    const char * text = (const char*)K;
    return amcc_hashcode(text);
}

// Key comparer: plain strcmp() ordering on the key strings.
static int pool_keyCmp_ (t_map_key K1, t_map_key K2) {
    return strcmp((const char*)K1, (const char*)K2);
}

// Entry destructor for both hash maps: releases the token-body value.
// The map key points into the body's own text (see hashmap_put() call
// sites), so destroying the body releases the key storage too.
static void pool_freeEntry_ (t_map_key K, t_map_value V) {
    (void)K; // key is owned by the value; silence -Wunused-parameter
    t_token_body * tkBody = (t_token_body*)V;
    token_body_destroy(tkBody);
}


static void keywords_init_ ( ) {
    if (NULL != gKeywordsPtr)
        return;
    gKeywordsPtr = hashmap_new(30, pool_hash_,
                               pool_keyCmp_, pool_freeEntry_);
    int N = sizeof(tokenKindInfos) / sizeof(tokenKindInfos[0]);
    for (int i=0; i<N; ++i) {
        t_token_kind_info* pItem = &(tokenKindInfos[i]);
        if (pItem->kind <= TK_KW_MIN__) continue;
        if (pItem->kind >= TK_TYPE_MAX__) break;
        t_token_body *tkBody = token_body_new(pItem->literal, pItem->kind);
        hashmap_put(gKeywordsPtr,
                    (t_map_key)(string_text(tkBody->inputText)),
                    (t_map_value)tkBody);
    }
}

// Tear down the keyword table; safe to call when never initialized.
static void keywords_destroy_ ( ) {
    if (NULL != gKeywordsPtr) {
        hashmap_destroy(gKeywordsPtr);
        gKeywordsPtr = NULL; // allow re-initialization later
    }
}



//------------------------------------------------------------------------
// Pool for storing all token-bodies from input file.
//
// This pool is used for REDUCE memory overhead
// in case of multiple tokens have a same word.
//
// Each element is a pair of <TEXT, TOKEN_BODY> for token.
//

// Token Pool:  shared token-bodies,  the key is token-text.
// Holds non-keyword tokens only; lazily created by nonKW_init_().
static t_hashmap_ptr  gTokenPoolPtr;

// Lazily create the shared pool for non-keyword token-bodies.
static void nonKW_init_ ( ) {
    if (NULL == gTokenPoolPtr) {
        gTokenPoolPtr = hashmap_new(100, pool_hash_,
                                    pool_keyCmp_, pool_freeEntry_);
    }
}

// Release the non-keyword pool; safe to call when never initialized.
static void nonKW_destroy_ ( ) {
    if (NULL != gTokenPoolPtr) {
        hashmap_destroy(gTokenPoolPtr);
        gTokenPoolPtr = NULL; // allow re-initialization later
    }
}

// Initialize the token-pool, and MUST be invoked in lexer_module_init().
// Order matters for symmetry with tokenpool_destroy(): keywords first,
// then the general (non-keyword) pool.
void  tokenpool_init ( ) {
    keywords_init_ ();
    log_trace("  TokenPool[KeyWords] has initialized");
    nonKW_init_ ();
    log_trace("  TokenPool[Main]     has initialized");
}

// Destroy the token-pool, and MUST be invoked in lexer_module_clean().
// Tears down in reverse order of tokenpool_init(): main pool first,
// then the keyword table.
void tokenpool_destroy ( ) {
    nonKW_destroy_ ();
    log_trace("  TokenPool[Main]     has destroyed");
    keywords_destroy_ ();
    log_trace("  TokenPool[KeyWords] has destroyed");
}

// Find a shared token-body by its text. Keywords take priority over
// the general pool; returns NULL when the text is in neither map.
t_token_body * tokenpool_find (const char* tokText) {
    t_token_body * found =
        (t_token_body *)hashmap_find(gKeywordsPtr, (t_map_key)tokText);
    if (NULL == found) {
        found = (t_token_body *)hashmap_find(gTokenPoolPtr, (t_map_key)tokText);
    }
    return found;
}


/**
 * @brief Intern a token-body into the non-keyword pool.
 *
 * If the text is already pooled, the existing shared body is returned
 * and the freshly-built one is destroyed. On a brand-new entry the
 * token's numeric value is computed once; an out-of-range literal is
 * reported against (filePath, line, col).
 *
 * @return the pooled token-body, or NULL on allocation failure.
 */
t_token_body * tokenpool_put (const char* tokText, EnumTokenKind kind,
                              int line, int col, const char * filePath)
{
    // only put to pool of non-keywords
    t_token_body * body = token_body_new(tokText, kind);
    t_mapresult_insert res = hashmap_put(gTokenPoolPtr,
                                         (t_map_key)(string_text(body->inputText)),
                                         (t_map_value)body);

    if (res.value != NULL) {
        // Key already exists: hand back the pooled body, drop ours.
        token_body_destroy(body);
        return res.value;
    }

    if (false == res.success) {
        // Memory allocation failed inside the map.
        token_body_destroy(body);
        return NULL;
    }

    // A new map entry was created: fill its value exactly once.
    if (token_fill_value(body) < 0) { // number value is out-of-range
        ERROR_REPORT_INPUT(filePath, line, col,
                           EC_LITERAL_OVERFLOW,
                           tokText);
    }
    return body;
}


// Classify an identifier-like text: the pooled body's kind when the
// text is already interned, TK_ID otherwise, TK_ERROR for NULL/empty.
EnumTokenKind token_query_id_kind (const char* text) {
    if (NULL == text || '\0' == text[0])
        return TK_ERROR;
    const t_token_body * body = tokenpool_find(text);
    return (NULL != body) ? body->kind : TK_ID;
}
