"use strict";
import { token, token_types } from "./tokenize.mjs";

/**
 * Merges runs of consecutive `normal` tokens into single word tokens.
 *
 * Adjacent tokens of type `token_types.normal` are concatenated into one
 * token whose `line` is taken from the last merged token and whose `column`
 * is the smallest column seen in the run. Every other token passes through
 * unchanged, in its original position.
 *
 * @param {token[]} tokens
 * @returns {token[]}
 */
export function word_reduce(tokens) {
    const reduced_tokens = [];

    let buf = "";
    let buf_line = 0;
    let buf_start_column = Number.MAX_SAFE_INTEGER;

    // Emit the buffered word (if any) and reset the accumulator state.
    const flush = () => {
        if (buf !== "") {
            reduced_tokens.push(new token(token_types.normal, buf, buf_line, buf_start_column));
            buf = "";
            buf_line = 0;
            buf_start_column = Number.MAX_SAFE_INTEGER;
        }
    };

    for (const tk of tokens) {
        if (tk.type === token_types.normal) {
            buf += tk.str;
            buf_line = tk.line; // line of the last merged token wins
            if (tk.column < buf_start_column) {
                buf_start_column = tk.column;
            }
        }
        else {
            flush();
            reduced_tokens.push(tk);
        }
    }

    // BUG FIX: a word at the very end of the input used to be discarded,
    // because the buffer was only flushed when a non-normal token followed.
    flush();

    return reduced_tokens;
}

/**
 * Groups raw tokens into string and comment tokens.
 *
 * A `double_quote` token (not preceded by a `back_quote`) opens a string:
 * everything up to the next unescaped `double_quote` is merged into a single
 * `string` token, emitted between `left_double_quote` / `right_double_quote`
 * marker tokens. A `semicolon` token (not preceded by a `backslash`) opens a
 * comment that runs to the next `newline` / `carriagereturn` (unless that
 * terminator is preceded by a `back_quote`).
 *
 * NOTE(review): the escape characters are asymmetric — string open checks
 * `back_quote` but string close checks `backslash`, and vice versa for
 * comments. This matches the original behavior and is preserved as-is;
 * confirm it is intentional.
 *
 * @param {token[]} tokens
 * @returns {token[]}
 */
function reduce_comment_and_string(tokens) {
    const reduced_tokens = [];

    const optional_status = {
        start: -1,
        in_string: 0,
        in_comment: 1,
    };
    let current_status = optional_status.start;

    let buf = new token();
    let i = 0;
    while (i < tokens.length) {
        const tk = tokens[i];
        // A delimiter at index 0 can never be escaped; otherwise look at the
        // previous token. (BUG FIX: the opening checks previously used
        // `i >= 2`, which skipped the escape check for a delimiter at
        // index 1; the closing checks already used `i >= 1`.)
        const prev_type = i >= 1 ? tokens[i - 1].type : null;

        if (current_status === optional_status.start) {
            if (tk.type === token_types.double_quote && prev_type !== token_types.back_quote) {
                // Open a string. Clone the token instead of mutating the
                // caller's input (BUG FIX: `buf = tk` aliased and rewrote
                // the input array's tokens in place).
                buf = new token(token_types.string, tk.str, tk.line, tk.column);
                current_status = optional_status.in_string;
            }
            else if (tk.type === token_types.semicolon && prev_type !== token_types.backslash) {
                // Open a comment; it runs to the end of the line.
                buf = new token(token_types.comment, tk.str, tk.line, tk.column);
                current_status = optional_status.in_comment;
            }
            else {
                reduced_tokens.push(tk);
            }
        }
        else if (current_status === optional_status.in_string) {
            if (tk.type === token_types.double_quote && prev_type !== token_types.backslash) {
                // Closing quote: emit left marker, the content, right marker.
                reduced_tokens.push(new token(token_types.left_double_quote, token_types.left_double_quote.str, buf.line, buf.column));
                // buf.str still carries the opening quote character; drop it.
                buf.str = buf.str.substring(1);
                if (buf.str.length > 0) {
                    buf.column += 1; // content starts one column after the quote
                    reduced_tokens.push(buf);
                }
                reduced_tokens.push(new token(token_types.right_double_quote, tk.str, tk.line, tk.column));

                buf = new token();
                current_status = optional_status.start;
            }
            else {
                buf.str += tk.str;
            }
        }
        else if (current_status === optional_status.in_comment) {
            if ((tk.type === token_types.newline || tk.type === token_types.carriagereturn)
                && prev_type !== token_types.back_quote) {
                reduced_tokens.push(buf);
                buf = new token();
                reduced_tokens.push(tk);
                current_status = optional_status.start;
            }
            else {
                buf.str += tk.str;
            }
        }
        else {
            throw Error("reduce: unknown current status");
        }
        i += 1;
    }

    // BUG FIX: a comment reaching the end of the input (no trailing newline)
    // used to be silently dropped; flush it. An unterminated string is
    // likewise emitted raw (including its opening quote) rather than lost.
    if (current_status !== optional_status.start && buf.str !== "") {
        reduced_tokens.push(buf);
    }

    return reduced_tokens;
}

/**
 * Runs the full token-reduction pipeline.
 *
 * First merges adjacent word characters (`word_reduce`), then groups the
 * result into string and comment tokens (`reduce_comment_and_string`).
 *
 * @param {token[]} tokens
 * @returns {token[]} the reduced token list
 */
export function token_reduce(tokens) {
    return reduce_comment_and_string(word_reduce(tokens));
}