/*-------------------------------------------------------------------------
 - Copyright (c) 2024-2025 [XD-AMCC TEAM]
 - [XD-AMCC] is licensed under Mulan PSL v2.
 - You can use this software according to the terms and conditions of the Mulan PSL v2.
 - You may obtain a copy of Mulan PSL v2 at:
 -          http://license.coscl.org.cn/MulanPSL2
 - THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
 - OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
 - TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
 - See the Mulan PSL v2 for more details.
 ------------------------------------------------------------------------*/


/**
 * @brief Implement the class token stream.
 *
 *
 * @author  WXQ#XDU
 * @date    2024.11
 */

#include <assert.h>
#include <stdlib.h>

#include "../include/amcc_list.h"
#include "../include/amcc_error.h"
#include "../include/amcc_log.h"
#include "../include/amcc_time.h"

#include "inside/_lex_inner.h"

// get the underlay data for tokens
#define TOKEN_LIST(stream) ((t_list*)(stream)->tokens_buffer)

// Return the number of tokens stored in this stream
// (0 when the stream or its backing list has not been created yet).
int tokenstream_length(t_tokenstream_ptr pThis) {
    if (NULL != pThis && NULL != TOKEN_LIST(pThis))
        return list_size(TOKEN_LIST(pThis));
    return 0;
}


// A stream counts as "empty" when it holds no tokens at all, or when
// its very first token is already the end-of-file marker.
bool tokenstream_isEmpty(t_tokenstream_ptr pThis) {
    if (tokenstream_length(pThis) <= 0)
        return true;

    t_token_ptr head = (t_token_ptr)list_get_first_value(TOKEN_LIST(pThis));
    return (token_get_kind(head) == TK_EOF);
}

// In syntax analysis, look-ahead(peek) the token K positions beyond the
// current one (K may be zero or negative).
//
// Returns:
//   - the built-in EOF token when the stream is empty, not yet finalized,
//     or the computed index underflows below 0;
//   - the last token (the EOF bound to the input file) when the index
//     runs past the end;
//   - otherwise the token at the computed index.
t_token_ptr tokenstream_lookAheadToken(t_tokenstream_ptr pThis, int K) {
    int length = tokenstream_length(pThis);
    if (length <= 0)
        return lexer_eof_tokens_default()[0];

    // BUGFIX: arrayTokens is only materialized once TK_EOF has been
    // appended (see tokenstream_append). Guard against dereferencing a
    // NULL array when tokens exist but the stream is not finalized yet.
    if (NULL == pThis->arrayTokens)
        return lexer_eof_tokens_default()[0];

    int index = pThis->idxCurrentToken + K;
    if ( index < 0 )            // return built-in EOF
        return lexer_eof_tokens_default()[0];
    else if ( index >= length ) // return EOF which is bound to input file
        return pThis->arrayTokens[length -1] ;
    else
        return pThis->arrayTokens[index] ;
}

// In syntax analysis, peek the KIND of the token K positions ahead.
// A NULL look-ahead result degrades to TK_EOF.
EnumTokenKind tokenstream_lookAhead(t_tokenstream_ptr pThis, int K) {
    t_token_ptr peeked = tokenstream_lookAheadToken(pThis, K);
    if (NULL == peeked)
        return TK_EOF;
    return token_get_kind(peeked);
}

// In syntax analysis, fetch the next token for consumption: return the
// token one position ahead, then advance the index (capped at the last
// token so repeated calls at the end keep yielding the file's EOF).
t_token_ptr tokenstream_nextToken(t_tokenstream_ptr pThis) {
    t_token_ptr fetched = tokenstream_lookAheadToken(pThis, 1);

    int lastIndex = tokenstream_length(pThis) - 1;
    if (pThis->idxCurrentToken < lastIndex)
        pThis->idxCurrentToken += 1;  // NOTICE: init-value is -1

    return fetched;
}

// Means: nextToken + LookAheadToken(1) -- advance the current index
// (capped at the last token), then return the token one position ahead
// of the NEW position.
t_token_ptr tokenstream_consume(t_tokenstream_ptr pThis) {
    int lastIndex = tokenstream_length(pThis) - 1;
    if (pThis->idxCurrentToken < lastIndex)
        pThis->idxCurrentToken += 1;  // init-value is -1

    return tokenstream_lookAheadToken(pThis, 1);
}

// Current cursor position inside the stream.
// Returns -1 (the "before first token" state set by tokenstream_new /
// tokenstream_resetIndex) for a NULL stream, so that this accessor
// tolerates NULL like tokenstream_length and tokenstream_isEmpty do.
int tokenstream_currentIndex(t_tokenstream_ptr pThis) {
    if (NULL == pThis)
        return -1;
    return pThis->idxCurrentToken;
}

// Rewind the stream to its initial state (before the first token).
// Tolerates a NULL stream, consistent with the other accessors
// (tokenstream_length, tokenstream_isEmpty).
void tokenstream_resetIndex(t_tokenstream_ptr pThis) {
    if (NULL == pThis)
        return;
    pThis->idxCurrentToken = -1;
}


// Creates and initializes a new token stream.
// Returns NULL on allocation failure; the caller owns the returned
// object and must release it with tokenstream_destroy().
t_tokenstream_ptr tokenstream_new() {
    t_tokenstream_ptr pThis = (t_tokenstream_ptr)calloc(1,sizeof(t_tokenstream));
    if (NULL != pThis) {
        pThis->idxCurrentToken = -1; // very IMPORTANT: "before first token"
        pThis->arrayTokens   = NULL;
        pThis->tokens_buffer = NULL;
        // BUGFIX: "%tx" expects a ptrdiff_t argument; passing a pointer
        // to a printf-style format is undefined behavior. Use "%p".
        log_debug("TokenStream %p has initialized", (void*)pThis);
    }
    return pThis;
}


// Internal worker: release every resource owned by the stream, then the
// stream object itself. Safe to call with NULL.
static void tokenstream_destroy_impl_(t_tokenstream_ptr pThis) {
    if (NULL == pThis)
        return;

    // Free the flat array of token pointers -- but never the shared
    // built-in EOF array owned by the lexer.
    t_token_ptr* array = pThis->arrayTokens;
    if (NULL != array && array != lexer_eof_tokens_default()) {
        free(array);
        pThis->arrayTokens = NULL;
    }

    // Destroy the token list; its destroy callback releases every token.
    if (NULL != pThis->tokens_buffer)
        list_destroy(TOKEN_LIST(pThis));

    free(pThis);
}

// Public destructor: logs around the internal teardown.
void tokenstream_destroy(t_tokenstream_ptr pThis) {
    // BUGFIX: "%tx" expects a ptrdiff_t argument; passing a pointer is
    // undefined behavior with printf-style formats. Use "%p" instead.
    // Capture the address before the free so the second log line does
    // not read a freed pointer variable's (indeterminate) value.
    void* addr = (void*)pThis;
    log_trace("DESTROY-ing tokenStream %p ...", addr);
    tokenstream_destroy_impl_(pThis);
    log_debug("DESTROY-ed  tokenStream %p", addr);
}

// call-back function to destroy a token
// (registered via list_new() in tokenstream_append, so list_destroy()
// releases every token stored in the list)
static void callback_free_token(void* pToken){
    token_destroy((t_token_ptr)pToken);
}


// Append new token to tail of this stream.
//
// Ownership: on success the stream takes ownership of pToken (the list's
// destroy callback releases it in tokenstream_destroy). Appending TK_EOF
// finalizes the stream by materializing the flat array that the
// look-ahead functions index into.
//
// Returns  0 on success;
//         -1 on allocation failure (list creation or node append);
//         -2 when a token arrives after TK_EOF was already appended;
//         -3 when the flat array cannot be allocated for TK_EOF.
int tokenstream_append(t_tokenstream_ptr pThis, t_token_ptr pToken) {
    assert(NULL != pThis);
    assert(NULL != pToken);

    // Lazily create the backing token list on the first append.
    if (NULL == TOKEN_LIST(pThis)) {
        t_list_ptr list = list_new(callback_free_token);
        if(NULL == list) {
            ERROR_REPORT_MALLOC();
            return -1;
        }
        pThis->tokens_buffer = list;
    }

    // reject append any token after EOF
    t_token_ptr lastToken = (t_token_ptr)list_get_last_value(TOKEN_LIST(pThis));
    if (NULL != lastToken && TK_EOF == token_get_kind(lastToken)) {
        ERROR_REPORT_BUG("EOF already had been appended");
        return -2;
    }

    if (NULL == list_append( TOKEN_LIST(pThis), pToken)) {
        ERROR_REPORT_MALLOC();
        return -1;
    }

    // EOF seals the stream: snapshot the list into a flat array so the
    // look-ahead functions get O(1) indexed access.
    if( TK_EOF == token_get_kind(pToken) ) {
        t_token_ptr* tkArray = (t_token_ptr*)list_to_array( TOKEN_LIST(pThis) );
        if( NULL == tkArray ) {
            ERROR_REPORT_MALLOC();
            return -3;
        }
        pThis->arrayTokens = tkArray;
    }

    return 0;
}

// Dump tokens into logfile
// Writes a human-readable table of every token to the token-dump file
// (path from amcc_get_tokenFilePath()). A NULL stream produces a
// warning line instead of the table. Errors opening the file are
// reported and the function returns without writing.
void tokenstream_dump(t_tokenstream_ptr pThis) {
    FILE * fp = fopen(amcc_get_tokenFilePath(), "wt");
    if (NULL == fp) {
        ERROR_REPORT_FILEOP(EC_WRITEFILE, amcc_get_tokenFilePath());
        return;
    }

    log_info("====> DUMPing TOKENS into file [%s]", amcc_get_tokenFilePath());

    // File header: generator name, local time, and the input file path.
    const char * nowTimeTxt = amcc_get_timestring(NULL);
    fprintf(fp, "//  Generator: [" AMCC_PRJ_NAME "]\n");
    fprintf(fp, "// Local-Time: [%s]\n", nowTimeTxt);
    fprintf(fp, "// Input-File: [%s]\n",
            amcc_get_inputFilePath());

    int nTokens = (NULL == pThis) ? 0 : tokenstream_length(pThis);
    fprintf(fp, "==== BEGIN DUMP %d TOKENS ====\n", nTokens);
    if (NULL == pThis) {
        fprintf(fp, "WARNING:    '`v_v`'    token-stream is NULL\n" );
        goto END_FUNCTION;
    }
    fprintf(fp,"   #   L# C#    Kind-name          intValue          realValue [TEXT]\n");
    fprintf(fp,"---- ---- ----  --------------- ----------- ------------------ --------------\n");

    // NOTE(review): tokenstream_lookAheadToken() offsets from
    // idxCurrentToken, so this dump starts AFTER the current position;
    // presumably callers invoke tokenstream_resetIndex() first to dump
    // the whole stream -- TODO confirm.
    t_token_ptr pToken = NULL;
    for(int n = 1; n <= nTokens ; ++n) {
        if (NULL == (pToken = tokenstream_lookAheadToken(pThis, n)) )
            break;

        // One row per token: index, line:column, kind, int/real values, text.
        const char* inputText = token_get_text(pToken);
        fprintf(fp,  "%4d %4d:%-4d  %-15s "
                     "%11" AMCC_INT2STR_FMTSTR
                     " %#18.6lG" " [%s]\n" ,
                n,    pToken->pos.line,   pToken->pos.column,
                token_get_kindName(pToken),
                token_get_intValue(pToken),
                token_get_doubleValue(pToken),
                inputText);
        // Stop once the file-bound EOF token has been printed.
        if( token_is_eof(pToken) )
            break;
    }

END_FUNCTION:
    fprintf(fp, "==== END OF TOKENS ====\n");
    fclose(fp);
    log_info("====> DUMPed  %d TOKENS into file [%s]",
             nTokens, amcc_get_tokenFilePath());
}
