﻿#include <iostream>
#include <vector>
#include <sstream>
#include <regex>
#include <string>
#include <unordered_map>
#include <stdexcept>
#include <memory>

// Token categories produced by the Lexer.
enum class TokenType {
    KEYWORD,                // reserved word from the Lexer's keyword table ("if", "def", ...)
    IDENTIFIER,             // name matching [_a-zA-Z][_a-zA-Z0-9]* that is not a keyword
    NUMBER,                 // numeric literal (intended: integer or decimal, per numberRegex)
    OPERATOR,               // one or more of + - * / = < > !
    LEFTPARENTHESISREGEX,   // "(" (see leftParenthesisRegex in Lexer)
    RIGHTPARENTHESISREGEX,  // ")" (see rightParenthesisRegex in Lexer)
    WHITESPACE,             // whitespace characters (newlines are consumed for line counting)
    UNKNOWN                 // any character not matched by the rules above
};

// A single lexical token: its category, raw source text, and 1-based position.
class Token {
public:
    TokenType type;
    std::string value;
    int line;    // 1-based source line
    int column;  // 1-based source column

    Token(TokenType type, const std::string& value, int line, int column)
        : type(type), value(value), line(line), column(column) {}

    // Render the token as "CATEGORY(text)" for debugging output.
    std::string toString() const {
        const char* name;
        switch (type) {
        case TokenType::KEYWORD:               name = "KEYWORD"; break;
        case TokenType::IDENTIFIER:            name = "IDENTIFIER"; break;
        case TokenType::NUMBER:                name = "NUMBER"; break;
        case TokenType::OPERATOR:              name = "OPERATOR"; break;
        case TokenType::LEFTPARENTHESISREGEX:  name = "LEFTPARENTHESISREGEX"; break;
        case TokenType::RIGHTPARENTHESISREGEX: name = "RIGHTPARENTHESISREGEX"; break;
        case TokenType::WHITESPACE:            name = "WHITESPACE"; break;
        default:                               name = "UNKNOWN"; break;
        }
        return std::string(name) + "(" + value + ")";
    }
};

// 词法分析器类
class Lexer {
private:
    std::unordered_map<std::string, TokenType> keywords;
    std::regex identifierRegex;
    std::regex numberRegex;
    std::regex operatorRegex;
    std::regex leftParenthesisRegex;
    std::regex rightParenthesisRegex;
    std::regex whitespaceRegex;

public:
    Lexer() :
        identifierRegex("[_a-zA-Z][_a-zA-Z0-9]*"),
        numberRegex("\\d+(\\.\\d+)?"), // 匹配整数或带小数点的数字
        operatorRegex("[+\\-*/=<>!]+"),
        leftParenthesisRegex("\\("),
        rightParenthesisRegex("\\)"),
        whitespaceRegex("\\s+")

    {

        // 定义 Python 关键字
        keywords = {
            {"if", TokenType::KEYWORD}, {"else", TokenType::KEYWORD},
            {"for", TokenType::KEYWORD}, {"while", TokenType::KEYWORD},
            {"def", TokenType::KEYWORD}, {"return", TokenType::KEYWORD},
            {"class", TokenType::KEYWORD}, {"import", TokenType::KEYWORD},
        };
    }

    std::vector<Token> tokenize(const std::string& code) {
        std::vector<Token> tokens;
        int line = 1;
        int column = 1;

        auto addToken = [&](TokenType type, const std::string& value) {
            tokens.push_back(Token(type, value, line, column));
            column += value.length();
            };


        for (size_t i = 0; i < code.size(); ++i) {
            char c = code[i];
            if (c == '\n') {
                line++;
                column = 1;
                continue;
            }
            std::string value(1, c);
            if (std::regex_match(value, whitespaceRegex)) {
                addToken(TokenType::WHITESPACE, value);
            }
            else if (std::regex_match(value, operatorRegex)) {
                addToken(TokenType::OPERATOR, value);
            }
            else if (std::regex_match(value, operatorRegex)) {
                addToken(TokenType::OPERATOR, value);
            }
            else if (std::regex_match(value, numberRegex)) {
                addToken(TokenType::NUMBER, value);
            }
            else if (std::regex_match(value, identifierRegex)) {
                std::string identifier;
                while (i < code.size() && std::regex_match(std::string(1, code[i]), identifierRegex)) {
                    identifier += code[i];
                    i++;
                }
                i--;
                if (keywords.find(identifier) != keywords.end()) {
                    addToken(keywords[identifier], identifier);
                }
                else {
                    addToken(TokenType::IDENTIFIER, identifier);
                }
            }
            else {
                addToken(TokenType::UNKNOWN, value);
            }
        }



        return tokens;
    }



};



// Base class for all abstract-syntax-tree nodes.
class ASTNode {
public:
    virtual ~ASTNode() = default;  // virtual dtor: nodes are deleted through base pointers
    virtual void print(int indent = 0) const = 0;  // dump this subtree, indented by `indent` spaces
};

// Nodes are held by shared_ptr throughout the parser (see std::make_shared calls below).
using ASTNodePtr = std::shared_ptr<ASTNode>;

// AST node for a function definition: name, parameter names, and body node.
class FunctionNode : public ASTNode {
public:
    std::string name;
    std::vector<std::string> params;
    ASTNodePtr body;

    FunctionNode(const std::string& name, const std::vector<std::string>& params, ASTNodePtr body)
        : name(name), params(params), body(body) {}

    // Print "FunctionNode: <name>", the parameter list, and then the body,
    // each level indented by two extra spaces.
    void print(int indent = 0) const override {
        const std::string pad(indent, ' ');
        std::cout << pad << "FunctionNode: " << name << "\n";
        std::cout << pad << "  " << "Params: ";
        for (std::size_t i = 0; i < params.size(); ++i) {
            std::cout << params[i] << " ";
        }
        std::cout << "\n";
        if (body != nullptr) {
            body->print(indent + 2);
        }
    }
};

// AST node for a `return <expression>` statement; the expression is kept as
// the raw token text, not parsed further.
class ReturnNode : public ASTNode {
public:
    std::string expression;

    ReturnNode(const std::string& expression) : expression(expression) {}

    // Print "ReturnNode: <expression>" at the requested indentation.
    void print(int indent = 0) const override {
        std::cout << std::string(indent, ' ')
                  << "ReturnNode: "
                  << expression
                  << "\n";
    }
};



// Exception describing a parse failure: a message plus the 1-based source
// position (line, column) where it occurred.
class ParseError : public std::exception {
private:
    std::string msg_;
    int line_;
    int col_;

public:
    ParseError(const std::string& message, int line, int column)
        : msg_(message), line_(line), col_(column) {}

    // Message text only; the position is exposed via getLine()/getColumn().
    const char* what() const noexcept override { return msg_.c_str(); }

    // 1-based line of the offending token.
    int getLine() const noexcept { return line_; }

    // 1-based column of the offending token.
    int getColumn() const noexcept { return col_; }
};



class Parser {
private:
    std::vector<Token> tokens;
    size_t current;

    Token peek() {
        if (isAtEnd()) throw ParseError("Unexpected end of input", tokens[current - 1].line, tokens[current - 1].column);
        return tokens[current];
    }

    Token advance() {
        if (isAtEnd()) throw ParseError("Unexpected end of input", tokens[current - 1].line, tokens[current - 1].column);
        return tokens[current++];
    }

    bool match(TokenType type) {
        if (check(type)) {
            advance();
            return true;
        }
        return false;
    }

    bool check(TokenType type) {
        if (isAtEnd()) return false;
        return peek().type == type;
    }

    bool isAtEnd() {
        return current >= tokens.size();
    }

    bool isSpace() {
        return tokens[current].type == TokenType::WHITESPACE;
    }

    std::string getErrorContext(int line) {
        std::ostringstream oss;
        for (const auto& token : tokens) {
            if (token.line == line) {
                oss << token.value;
            }
            else if (token.line > line) {
                break;
            }
        }
        return oss.str();
    }

public:
    Parser(const std::vector<Token>& tokens) : tokens(tokens), current(0) {}

    ASTNodePtr parse() {
        try {
            return parseFunction();
        }
        catch (const ParseError& e) {
            std::cerr << "Parse error: " << e.what() << " at line " << e.getLine() << ", column " << e.getColumn() << std::endl;
            std::string errorContext = getErrorContext(e.getLine());
            std::cerr << errorContext << std::endl;
            std::cerr << std::string(e.getColumn() - 1, ' ') << "^" << std::endl;
            return nullptr;
        }
    }

private:
    ASTNodePtr parseFunction() {
        if (!check(TokenType::KEYWORD) || peek().value != "def") {
            Token token = peek();
            throw ParseError("Expected 'def' keyword", token.line, token.column);
        }
        advance();
        advance(); // consume 'def'
        if (!check(TokenType::IDENTIFIER)) {
            Token token = peek();
            throw ParseError("Expected function name", token.line, token.column);
        }
        std::string functionName = advance().value; // function name
        if (!check(TokenType::OPERATOR) || peek().value != "(") {
            Token token = peek();
            throw ParseError("Expected '('", token.line, token.column);
        }
        advance(); // consume '('
        std::vector<std::string> params;
        while (!check(TokenType::OPERATOR) || peek().value != ")") {
            if (!match(TokenType::IDENTIFIER)) {
                Token token = peek();
                throw ParseError("Expected parameter name", token.line, token.column);
            }
            params.push_back(advance().value);
            if (check(TokenType::OPERATOR) && peek().value == ",") {
                advance(); // consume ','
            }
        }
        advance(); // consume ')'
        if (!check(TokenType::OPERATOR) || peek().value != ":") {
            Token token = peek();
            throw ParseError("Expected ':'", token.line, token.column);
        }
        advance(); // consume ':'
        ASTNodePtr body = parseReturn();
        return std::make_shared<FunctionNode>(functionName, params, body);
    }

    ASTNodePtr parseReturn() {
        if (!check(TokenType::KEYWORD) || peek().value != "return") {
            Token token = peek();
            throw ParseError("Expected 'return' keyword", token.line, token.column);
        }
        advance(); // consume 'return'
        if (!check(TokenType::IDENTIFIER) && !check(TokenType::NUMBER)) {
            Token token = peek();
            throw ParseError("Expected return expression", token.line, token.column);
        }
        std::string expression = advance().value;
        return std::make_shared<ReturnNode>(expression);
    }
};





//int main() {
//    std::string code = "def foo(x): return x + 4.2";
//
//    Lexer lexer;
//    std::vector<Token> tokens = lexer.tokenize(code);
//
//    Parser parser(tokens);
//    try {
//        ASTNodePtr ast = parser.parse();
//        if (ast) {
//            ast->print();
//        }
//        else {
//            std::cout << "Parsing failed!" << std::endl;
//        }
//    }
//    catch (const ParseError& e) {
//        std::cerr << "Parse error: " << e.what() << " at line " << e.getLine() << ", column " << e.getColumn() << std::endl;
//    }
//
//    return 0;
//}
