﻿using System.Text;

namespace fastdb.Lexer;

/// <summary>
///     Lexical analyzer: splits raw SQL text into a token stream.
/// </summary>
public class SqlLexer
{
    /// <summary>
    ///     Scans <paramref name="sql" /> and returns its tokens in source order.
    /// </summary>
    /// <param name="sql">The raw SQL text to tokenize.</param>
    /// <returns>
    ///     Tokens in the order they appear in the input, each annotated with its
    ///     1-based line number and 0-based column of its first character.
    /// </returns>
    public List<TokenNode> Scan(string sql)
    {
        var tokens = new List<TokenNode>();

        var buffer = new StringBuilder(128);  // pending identifier / keyword / number characters
        var strTemp = new StringBuilder(128); // pending body of a double-quoted string literal
        var isStringModel = false;            // true while between a pair of '"' characters

        var pos = 0;
        var lineNumber = 1;
        var lineColNumber = 0;
        while (pos < sql.Length)
        {
            var c = sql[pos];

            if (c == '"')
            {
                if (isStringModel)
                {
                    // Closing quote: emit the literal, even when it is empty ("").
                    // (Previously an empty literal leaked its closing quote into the word buffer,
                    // and the token's LineNumber was hard-coded to 1.)
                    tokens.Add(new TokenNode
                    {
                        TokenType = TokenTypeEnum.STRING,
                        Content = strTemp.ToString(),
                        LineNumber = lineNumber,
                        ColNumber = lineColNumber - strTemp.Length
                    });
                    strTemp.Clear();
                }
                else
                {
                    // Opening quote: flush any pending word first so tokens keep source order.
                    FlushBuffer(tokens, buffer, lineNumber, lineColNumber);
                }

                isStringModel = !isStringModel;
                pos++;
                lineColNumber++;
                continue;
            }

            if (isStringModel)
            {
                // Inside a literal every character is taken verbatim, but line/column
                // bookkeeping must still advance across embedded newlines.
                if (c == '\n')
                {
                    lineNumber++;
                    lineColNumber = -1; // incremented back to 0 at the end of the loop
                }

                strTemp.Append(c);
            }
            else if (TokenKeyword.Keywords.TryGetValue(c.ToString(), out var res))
            {
                // Flush the pending word BEFORE touching the counters so the word
                // keeps the line/column it actually started on.
                FlushBuffer(tokens, buffer, lineNumber, lineColNumber);

                tokens.Add(new TokenNode
                {
                    TokenType = res,
                    Content = c.ToString(),
                    LineNumber = lineNumber,
                    ColNumber = lineColNumber
                });

                if (res == TokenTypeEnum.NEWLINE)
                {
                    lineNumber++;
                    lineColNumber = -1; // incremented back to 0 at the end of the loop
                }
            }
            else
            {
                buffer.Append(c);
            }

            pos++;
            lineColNumber++;
        }

        // Flush whatever word is still pending at end of input.
        FlushBuffer(tokens, buffer, lineNumber, lineColNumber);

        // Unterminated string literal: keep its content rather than silently dropping it.
        if (strTemp.Length > 0)
            tokens.Add(new TokenNode
            {
                TokenType = TokenTypeEnum.STRING,
                Content = strTemp.ToString(),
                LineNumber = lineNumber,
                ColNumber = lineColNumber - strTemp.Length
            });

        return tokens;
    }

    /// <summary>
    ///     Emits the buffered word (if any) as a token and clears the buffer.
    ///     The word started <c>buffer.Length</c> columns before the current position.
    /// </summary>
    private static void FlushBuffer(List<TokenNode> tokens, StringBuilder buffer, int lineNumber, int lineColNumber)
    {
        if (buffer.Length == 0) return;

        var content = buffer.ToString();
        tokens.Add(new TokenNode
        {
            TokenType = GetTokenType(content),
            Content = content,
            LineNumber = lineNumber,
            ColNumber = lineColNumber - content.Length
        });
        buffer.Clear();
    }

    /// <summary>
    ///     Classifies a completed word: multi-character keyword, integer literal,
    ///     or otherwise a reference (identifier).
    /// </summary>
    private static TokenTypeEnum GetTokenType(string buffer)
    {
        if (TokenKeyword.Keywords.TryGetValue(buffer, out var res))
            return res;

        return int.TryParse(buffer, out _) ? TokenTypeEnum.INT : TokenTypeEnum.Ref;
    }
}