/*-------------------------------------------------------------------------
 - Copyright (c) 2024-2025 [XD-AMCC TEAM]
 - [XD-AMCC] is licensed under Mulan PSL v2.
 - You can use this software according to the terms and conditions of the Mulan PSL v2.
 - You may obtain a copy of Mulan PSL v2 at:
 -          http://license.coscl.org.cn/MulanPSL2
 - THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
 - OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
 - TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
 - See the Mulan PSL v2 for more details.
 ------------------------------------------------------------------------*/


/**
 * @brief API implementation of the class lexer(scanner).
 *
 * @author  WXQ#XDU
 * @date    2024.11
 */

#include <stdio.h>
#include <stdlib.h>

#include "../include/amcc_list.h"
#include "../include/amcc_error.h"
#include "../include/amcc_log.h"

#include "inside/_lex_inner.h"


// -----------------------------------------------------------------------
// Public functions for LEXer module
//

// a tag object for abnormal situation
static t_token_ptr EMPTY_TOKEN_ARRAY[1];

/** Fallback one-element token array; slot 0 holds the built-in <EOF> token
 *  installed by lexer_module_init(). */
t_token_ptr * lexer_eof_tokens_default() {
    return &EMPTY_TOKEN_ARRAY[0];
}


/** initialize global data of lexer module
 *  @return 0 on success, -1 when the built-in <EOF> token could not be allocated
 */
int lexer_module_init() {
    tokenpool_init();
    EMPTY_TOKEN_ARRAY[0] = token_new("<EOF>", TK_EOF, NULL, 0, 0);
    // FIX: token_new() can return NULL on allocation failure (see lexer_run(),
    // which checks the same call); the old code dereferenced it unconditionally.
    if (NULL == EMPTY_TOKEN_ARRAY[0]) {
        ERROR_REPORT_MALLOC();
        return -1;
    }
    EMPTY_TOKEN_ARRAY[0]->pos.filePath = "built-in";

    log_info("*** RECOGNIZE_TOKEN_STYLE is \"%s\".", lexer_style());
    return 0;
}

/** clean global data of lexer module */
void lexer_module_clean() {
    token_destroy(EMPTY_TOKEN_ARRAY[0]);
    // FIX: do not leave a dangling pointer in the global slot — a second
    // clean, or lexer_eof_tokens_default() used after cleanup, would
    // otherwise touch freed memory.
    // NOTE(review): assumes token_destroy(NULL) is a no-op, like the NULL
    // guards elsewhere in this file — confirm against token_destroy().
    EMPTY_TOKEN_ARRAY[0] = NULL;
    tokenpool_destroy();
}



// -----------------------------------------------------------------------
// public API implementation of class Lexer
//


/** Allocate and initialize a new Lexer bound to the given input file.
 *  @param inputFilePath  path of the file to tokenize (stored via lexer_setInputFile)
 *  @return a new lexer, or NULL on allocation failure (error already reported)
 *  Ownership: caller must release the lexer with lexer_destroy().
 */
t_lexer_ptr lexer_new(const char* inputFilePath) {
    t_lexer_ptr pThis = (t_lexer_ptr)calloc(1, sizeof(t_lexer));
    if (NULL == pThis) {
        ERROR_REPORT_MALLOC();
        return NULL;
    }

    lexer_setInputFile(pThis, inputFilePath);
    // FIX: "%tx" expects a ptrdiff_t argument; passing a pointer is undefined
    // behavior per the C standard. "%p" with a (void*) cast is the portable form.
    log_debug("Lexer %p has created and initialized", (void*)pThis);
    log_info("Lexer will recognize tokens in file [%s]", pThis->inputFilePath);
    return pThis;
}


/** Release a lexer and all resources it still owns (including its token
 *  stream, unless it was taken via lexer_GRAB_tokenstream()).
 *  Safe to call with NULL. */
void  lexer_destroy(t_lexer_ptr pThis) {
    if (NULL == pThis)
        return;

    lexer_release_resources(pThis, true);
    // FIX: "%tx" expects ptrdiff_t; printing a pointer that way is undefined
    // behavior — use "%p" with a (void*) cast instead.
    log_debug("Lexer %p has destroyed", (void*)pThis);
    free(pThis);
}


/** Main driver of a Lexer: open, load and scan the input file, then append a
 *  trailing <EOF> token to the token stream.
 *  @return 0 on success, -1 if any lexical errors were recorded,
 *          -2 when called with a NULL lexer
 */
int lexer_run(t_lexer_ptr pThis) {
    if (NULL == pThis) {
        ERROR_REPORT_BUG("pass a NULL argument to lexer_run()");
        return -2;
    }

    if (NULL == pThis->tokenStream) { // set in lexer_setInputFile()
        ERROR_REPORT_BUG("lexer_setInputFile() must run before lexer_run()");
        pThis->nErrors = error_get_count();
        return ( pThis->nErrors > 0 ) ? -1 : 0;
    }

// Step 1: Open the input file
    if ( lexer_open_file(pThis) < 1 ) {
        goto LABEL_EXIT_FUNC;
    }

// Step 2: Load file into a char-stream which is the input buffer object
    log_info(">>> %s(): Lexer starts to work ...", __FUNCTION__ );
    int readStatus = lexer_load_file(pThis);
    if ( readStatus < 0 ) {   // failed to allocate  memory, ...
        log_error("Failed to load file, Lexer has ABORTed");
        goto LABEL_EXIT_FUNC;
    }

// Step 3: recognize tokens if file is not empty, and append tokens to token-stream
    log_trace("Scanning the input file, recognizing tokens ...");
    if (-2 == lexer_scan_file(pThis)) {
        log_error("Failed to scan file and recognize tokens, Lexer has ABORTed");
        goto LABEL_EXIT_FUNC;
    }

// Step 4: append an explicit EOF to the end of the token stream

    t_token_ptr  eofToken = token_new("<EOF>",TK_EOF, pThis,
                                      pThis->currentPosition.line,
                                      pThis->currentPosition.column);
    if(NULL == eofToken){
        ERROR_REPORT_MALLOC();
    }
    else {  // add the new EOF token to token-stream ...
        token_set_position(eofToken, pThis->currentPosition );
        tokenstream_append(pThis->tokenStream, eofToken);
        log_trace("** append a special token < %s , [%2d : %2d] , \"%s\" > to indicate the end of input",
                  token_get_kindName(eofToken) ,
                  pThis->currentPosition.line ,
                  pThis->currentPosition.column ,
                  token_get_text(eofToken) );
    }

LABEL_EXIT_FUNC:
    // release unused resource, but hold the token stream
    lexer_release_resources(pThis, false);
    pThis->nErrors = error_get_count();
    // FIX: the old code logged lexer_tokens_count() - 1 unconditionally, which
    // reports "-1 tokens" when the stream is empty (open/load failure, or OOM
    // while appending <EOF>). Clamp at zero, and reuse the cached error count
    // instead of calling error_get_count() a second time.
    int nTokens = lexer_tokens_count(pThis);
    log_info("<<< %s(): Lexer finished, found %d errors, "
             "recognized %d tokens(excluding \"<EOF>\" at the end)",
             __FUNCTION__ , pThis->nErrors,
             (nTokens > 0) ? (nTokens - 1) : 0);
    return ( pThis->nErrors > 0 ) ? -1 : 0;
}


/** Number of lexical errors recorded by this lexer; 0 for a NULL lexer. */
int lexer_error_count(t_lexer_ptr pThis) {
    return (NULL == pThis) ? 0 : pThis->nErrors;
}

/** Dump the lexer's token stream for debugging. No-op when there is no
 *  stream to dump. */
void lexer_dump_tokens(t_lexer_ptr pThis) {
    if (NULL == pThis) {
        ERROR_REPORT_BUG("pass a NULL argument to lexer_dump_tokens()");
        return;
    }

    // Be consistent with lexer_tokens_count(): tokenStream may be NULL after
    // lexer_GRAB_tokenstream() transferred ownership, or before
    // lexer_setInputFile() ran — don't hand a NULL stream to tokenstream_dump().
    if (NULL == pThis->tokenStream) {
        log_debug("lexer_dump_tokens(): no token stream to dump");
        return;
    }

    tokenstream_dump(pThis->tokenStream);
}

/** Number of tokens currently held in the lexer's stream (including the
 *  trailing <EOF>, once appended); 0 for a NULL lexer or missing stream. */
int lexer_tokens_count(t_lexer_ptr pThis) {
    if (NULL != pThis && NULL != pThis->tokenStream)
        return tokenstream_length(pThis->tokenStream);
    return 0;
}

/** Borrow the lexer's token stream without transferring ownership;
 *  NULL for a NULL lexer. */
t_tokenstream_ptr lexer_get_tokenstream(t_lexer_ptr pThis) {
    if (NULL == pThis)
        return NULL;
    return pThis->tokenStream;
}

/** Take ownership of the lexer's token stream: the lexer forgets it and the
 *  caller becomes responsible for destroying it. Returns NULL when there is
 *  no lexer or no stream left to grab. */
struct token_stream * lexer_GRAB_tokenstream(t_lexer_ptr pThis) {
    if (NULL == pThis)
        return NULL;

    struct token_stream * grabbed = pThis->tokenStream;
    pThis->tokenStream = NULL; // release the ownership
    if (grabbed != NULL) {
        log_info("NOTICE: CALLER MUST DESTROY the tokenstream - "
                 "Caller of lexer has grabbed it"  );
    }
    return grabbed;
}
