#include "lexer.h"
#include "parser.h"
#include "semantic_analyzer.h"
#include "plan_generator.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

// Dump the full token stream of `lexer` to stdout, one token per line.
// Stops at EOF_TOKEN (not printed) or after printing an ERROR token.
// Every token obtained from get_next_token() is freed exactly once.
void print_tokens(Lexer* lexer) {
    printf("Token Stream:\n");
    printf("%-15s %-20s %-10s\n", "Type", "Lexeme", "Position");
    printf("-------------------------------------------\n");

    for (;;) {
        Token* token = get_next_token(lexer);

        // End of input: free the sentinel token and stop.
        if (token->type == EOF_TOKEN) {
            free_token(token);
            break;
        }

        printf("%-15s %-20s Line:%d, Col:%d\n",
               token_type_to_string(token->type),
               token->lexeme,
               token->line,
               token->column);

        // Remember the error condition before releasing the token, so we
        // never touch it after free (the original code freed the ERROR
        // token inside the loop and then freed it again after the loop —
        // a double free).
        int is_error = (token->type == ERROR);
        free_token(token);
        if (is_error) {
            break;
        }
    }
}

// Read the entire contents of `filename` into a freshly allocated,
// NUL-terminated buffer. Returns NULL on any failure (open, seek, size
// query, allocation, or read error). Ownership of the returned buffer
// transfers to the caller, who must free() it.
char* read_sql_from_file(const char* filename) {
    FILE* file = fopen(filename, "r");
    if (!file) {
        fprintf(stderr, "Cannot open file: %s\n", filename);
        return NULL;
    }

    // Determine file size by seeking to the end. Both fseek and ftell can
    // fail (e.g. on a pipe or directory), so check them instead of feeding
    // a -1 size into malloc.
    long size = -1;
    if (fseek(file, 0, SEEK_END) == 0) {
        size = ftell(file);
    }
    if (size < 0 || fseek(file, 0, SEEK_SET) != 0) {
        fprintf(stderr, "Cannot determine size of file: %s\n", filename);
        fclose(file);
        return NULL;
    }

    // +1 for the terminating NUL. No cast on malloc in C.
    char* buffer = malloc((size_t)size + 1);
    if (!buffer) {
        fprintf(stderr, "Memory allocation failed\n");
        fclose(file);
        return NULL;
    }

    // fread may legitimately return fewer bytes than `size` (text-mode
    // newline translation on Windows), so terminate at the actual count
    // and treat only a stream error as fatal.
    size_t nread = fread(buffer, 1, (size_t)size, file);
    if (ferror(file)) {
        fprintf(stderr, "Read error on file: %s\n", filename);
        free(buffer);
        fclose(file);
        return NULL;
    }
    buffer[nread] = '\0';

    fclose(file);
    return buffer;
}

// Run every semicolon-separated SQL statement in `filename` through the
// full pipeline (lex, parse, semantic analysis, plan generation), printing
// a numbered header for each case. Each statement gets fresh lexer,
// parser, and catalog instances, all released before the next case.
void run_batch_tests(const char* filename) {
    char* sql_content = read_sql_from_file(filename);
    if (sql_content == NULL) {
        fprintf(stderr, "Failed to read test file: %s\n", filename);
        return;
    }

    // strtok mutates sql_content in place; that is fine because we own
    // the buffer and free it only after the loop.
    int case_no = 1;
    for (char* stmt = strtok(sql_content, ";");
         stmt != NULL;
         stmt = strtok(NULL, ";"), case_no++) {
        printf("\n\n===== Test Case %d =====\n", case_no);
        printf("SQL: %s;\n", stmt);

        // Fresh pipeline objects per statement.
        Lexer* lexer = create_lexer(stmt);
        Parser* parser = create_parser(lexer);
        Catalog* catalog = create_catalog();

        // Parse, then (only on success) analyze and plan.
        ASTNode* ast = parse_statement(parser);
        if (ast != NULL) {
            SemanticResult result = analyze_semantics(ast, catalog);
            if (result.success) {
                PlanNode* plan = generate_plan(ast, catalog);
                if (plan != NULL) {
                    free_plan(plan);
                }
            }
            free_semantic_result(result);
            free_ast_node(ast);
        }

        // Tear down this case's resources.
        free_parser(parser);
        free_lexer(lexer);
        free_catalog(catalog);
    }

    free(sql_content);
}

// Entry point. Usage:
//   prog --batch <file>   run every semicolon-separated statement in <file>
//   prog <file>           compile the SQL found in <file>
//   prog                  read one statement (terminated by ';') from stdin
// Returns 0 on success, 1 on read/parse/semantic failure.
int main(int argc, char** argv) {
    if (argc > 2 && strcmp(argv[1], "--batch") == 0) {
        // Batch test mode.
        run_batch_tests(argv[2]);
        return 0;
    }

    // Single-statement mode: obtain SQL from a file argument or stdin.
    char* sql_input = NULL;

    if (argc > 1) {
        sql_input = read_sql_from_file(argv[1]);
        if (!sql_input) {
            return 1;
        }
    } else {
        printf("Enter SQL statement (end with ;):\n");
        char buffer[4096];
        size_t len = 0;
        // Must be int, not char: getchar() returns EOF, a value outside
        // the range of char, so a char loop variable either never sees
        // EOF (unsigned char) or mistakes byte 0xFF for it (signed char).
        int c;

        while ((c = getchar()) != EOF) {
            if (len >= sizeof(buffer) - 1) {
                fprintf(stderr, "Input too long\n");
                return 1;
            }
            buffer[len++] = (char)c;
            if (c == ';') {
                break;
            }
        }
        buffer[len] = '\0';

        sql_input = strdup(buffer);
        if (!sql_input) {
            fprintf(stderr, "Memory allocation failed\n");
            return 1;
        }
    }

    printf("SQL Input:\n%s\n\n", sql_input);

    // First pass: dump the token stream for inspection.
    Lexer* lexer = create_lexer(sql_input);
    print_tokens(lexer);

    // Recreate the lexer so parsing starts from the beginning of input.
    free_lexer(lexer);
    lexer = create_lexer(sql_input);

    Parser* parser = create_parser(lexer);

    ASTNode* ast = parse_statement(parser);
    if (!ast) {
        fprintf(stderr, "Parsing failed\n");
        free_parser(parser);
        free_lexer(lexer);
        free(sql_input);
        return 1;
    }

    Catalog* catalog = create_catalog();

    SemanticResult semantic_result = analyze_semantics(ast, catalog);
    if (!semantic_result.success) {
        fprintf(stderr, "Semantic error at line %d, column %d: %s\n",
                semantic_result.error_line, semantic_result.error_column,
                semantic_result.error_message);
        // Stop here. The original code freed semantic_result, then fell
        // through to plan generation on an invalid AST and freed the
        // result a second time at the bottom — a double free.
        free_semantic_result(semantic_result);
        free_ast_node(ast);
        free_parser(parser);
        free_lexer(lexer);
        free_catalog(catalog);
        free(sql_input);
        return 1;
    }

    printf("\nSemantic analysis passed\n");

    // Register CREATE TABLE statements in the catalog so plan generation
    // can see the newly defined schema.
    if (ast->type == NODE_CREATE_TABLE) {
        TableSchema* table_schema = create_table_schema_from_ast(ast);
        add_table_to_catalog(catalog, table_schema);
    }

    // Generate and print the execution plan.
    PlanNode* plan = generate_plan(ast, catalog);
    if (plan) {
        printf("\nExecution Plan:\n");
        print_plan(plan, 0);
        free_plan(plan);
    }

    // Release all remaining resources.
    free_semantic_result(semantic_result);
    free_ast_node(ast);
    free_parser(parser);
    free_lexer(lexer);
    free_catalog(catalog);
    free(sql_input);

    return 0;
}