%option noyywrap
%{
/*****************声明和选项设置  begin*****************/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "lexical_analyzer.h"

int lines;
int pos_start;
int pos_end;

/*****************声明和选项设置  end*****************/

%}
 

%%
 /****请在此补全所有flex的模式与动作  start******/
"+"                                {pos_start=pos_end;pos_end=pos_start+1; return ADD;}
"-"                                {pos_start=pos_end;pos_end=pos_start+1; return SUB;}
"*"                                {pos_start=pos_end;pos_end=pos_start+1; return MUL;}
"/"                                {pos_start=pos_end;pos_end=pos_start+1; return DIV;}
"<"                                {pos_start=pos_end;pos_end=pos_start+1; return LT;}
"<="                               {pos_start=pos_end;pos_end=pos_start+2; return LTE;}
">"                                {pos_start=pos_end;pos_end=pos_start+1; return GT;}
">="                               {pos_start=pos_end;pos_end=pos_start+2; return GTE;}
"=="                               {pos_start=pos_end;pos_end=pos_start+2; return EQ;}
"!="                               {pos_start=pos_end;pos_end=pos_start+2; return NEQ;}
"="                                {pos_start=pos_end;pos_end=pos_start+1; return ASSIN;}
";"                                {pos_start=pos_end;pos_end=pos_start+1; return SEMICOLON;}
","                                {pos_start=pos_end;pos_end=pos_start+1; return COMMA;}
"("                                {pos_start=pos_end;pos_end=pos_start+1; return LPARENTHESE;}
")"                                {pos_start=pos_end;pos_end=pos_start+1; return RPARENTHESE;}
"["                                {pos_start=pos_end;pos_end=pos_start+1; return LBRACKET;}
"]"                                {pos_start=pos_end;pos_end=pos_start+1; return RBRACKET;}
"{"                                {pos_start=pos_end;pos_end=pos_start+1; return LBRACE;}
"}"                                {pos_start=pos_end;pos_end=pos_start+1; return RBRACE;}
"else"                             {pos_start=pos_end;pos_end=pos_start+4; return ELSE;}
"if"                               {pos_start=pos_end;pos_end=pos_start+2; return IF;}
"int"                              {pos_start=pos_end;pos_end=pos_start+3; return INT;}
"float"                            {pos_start=pos_end;pos_end=pos_start+5; return FLOAT;}
"return"                           {pos_start=pos_end;pos_end=pos_start+6; return RETURN;}
"void"                             {pos_start=pos_end;pos_end=pos_start+4; return VOID;}
"while"                            {pos_start=pos_end;pos_end=pos_start+5; return WHILE;}
[a-zA-Z][a-zA-Z0-9]*               {pos_start=pos_end;pos_end=pos_start+strlen(yytext); return IDENTIFIER;}
[0-9]+                             {pos_start=pos_end;pos_end=pos_start+strlen(yytext); return INTEGER;}
[0-9]+"."[0-9]*                    {pos_start=pos_end;pos_end=pos_start+strlen(yytext); return FLOATPOINT;}
"[]"                               {pos_start=pos_end;pos_end=pos_start+2;return ARRAY;}
"\n"                               {return EOL;return EOL;}
" "                                {return BLANK;}
[\t]                               {return BLANK;}
\/\*([^\*]|(\*)*[^\*\/])*(\*)*\*\/ {return COMMENT;}
.                                  {printf("[ERR]: unrecognized character %s at %d line, from %d to %d\n", yytext, lines, pos_start, pos_end); return ERROR;}

%%
/****************C代码 start*************/

/// \brief analysize a *.cminus file
///
/// \param input_file, 需要分析的文件路径
/// \param token stream, Token_Node结构体数组，用于存储分析结果，具体定义参考lexical_analyer.h

void analyzer(char* input_file, Token_Node* token_stream){
    lines = 1;
    pos_start = 1;
    pos_end = 1;
    if(!(yyin = fopen(input_file,"r"))){
        printf("[ERR] No input file\n");
        exit(1);
    }
    printf("[START]: Read from: %s\n", input_file);

    int token;
    int index = 0;

    while(token = yylex()){
        switch(token){
            case COMMENT:
                pos_start=pos_end;
                pos_end=pos_start+2;
                int i=2;
                while(yytext[i]!='*' || yytext[i+1]!='/')
                {
                   if(yytext[i]=='\n')
                   {
                     lines=lines+1;
                	 pos_end=1;
                   }
                   else
                	 pos_end=pos_end+1;
                   i=i+1;
                }
                pos_end=pos_end+2;
            case BLANK:
                pos_start=pos_end;
                pos_end=pos_start+1;
                break;
            case EOL:
                lines++;
                pos_start = 1;
                pos_end = 1;
                break;
            case ERROR:
                printf("[ERR]: unable to analysize %s at %d line, from %d to %d\n", yytext, lines, pos_start, pos_end);
            default :
                if (token == ERROR){
                    sprintf(token_stream[index].text, "[ERR]: unable to analysize %s at %d line, from %d to %d", yytext, lines, pos_start, pos_end);
                } else {
                    strcpy(token_stream[index].text, yytext);
                }
                token_stream[index].token = token;
                token_stream[index].lines = lines;
                token_stream[index].pos_start = pos_start;
                token_stream[index].pos_end = pos_end;
                index++;
                if (index >= MAX_NUM_TOKEN_NODE){
                    printf("%s has too many tokens (> %d)", input_file, MAX_NUM_TOKEN_NODE);
                    exit(1);
                }
        }
    }

    FILE *output_file;
    output_file = fopen("output_file.txt", "w"); // 打开输出文件
    if (output_file == NULL) {
        printf("[ERR] Unable to open output file\n");
        exit(1);
    }

    // 遍历 Token Node 数组，并将信息写入文件
    for (int i = 0; i < index; i++) {
        fprintf(output_file, "Token: %d, Text: %s, Lines: %d, Start: %d, End: %d\n",
                token_stream[i].token, token_stream[i].text,
                token_stream[i].lines, token_stream[i].pos_start, token_stream[i].pos_end);
    }

    fclose(output_file); // 关闭文件
    printf("[END]: Analysis completed.\n");
    return;
}
/*
int main(int argc, char *argv[])
{
    if (argc != 2) {
        printf("Usage: %s <input_file>\n", argv[0]);
        return 1;
    }

    Token_Node token_stream[MAX_NUM_TOKEN_NODE];
    analyzer(argv[1], token_stream); // 使用命令行参数指定的文件路径
    return 0;
}
*/
/****************C代码 end*************/
