﻿using Antlr4.Runtime;
using Antlr4.Runtime.Tree;
using c_parser;
using libC.semantic;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;

namespace libC
{
    public class tools
    {
        /// <summary>
        /// Marker string used to split concatenated code segments.
        /// </summary>
        public const string CODESPLITE = "%CODE_SPLITE%";

        // Matches a full line containing "class".
        // NOTE(review): not referenced anywhere in this file — confirm external use before removing.
        private static readonly Regex regClass = new Regex("\n.*class.*\n");
        /// <summary>
        /// Caches the reflection handles for one ANTLR lexer/parser pair and
        /// provides helpers to instantiate them and run the entry rule.
        /// </summary>
        public class ParserInfo
        {
            // Entry-rule method (e.g. "prog") on type_parser; invoked via reflection in Parse().
            public MethodInfo exprmethod;

            public Type type_lexer
            {
                get;
                private set;
            }

            public Type type_parser
            {
                get;
                private set;
            }

            /// <summary>
            /// Binds to the given lexer/parser types and resolves the entry-rule method.
            /// </summary>
            /// <param name="lexer">ANTLR-generated Lexer subclass.</param>
            /// <param name="parser">ANTLR-generated Parser subclass.</param>
            /// <param name="exprmethod">Name of the parser's public entry-rule method.</param>
            /// <exception cref="ArgumentException">No such method exists on <paramref name="parser"/>.</exception>
            public ParserInfo(Type lexer, Type parser, string exprmethod)
            {
                type_lexer = lexer;
                type_parser = parser;
                // Fail fast here rather than with a NullReferenceException on the first Parse().
                this.exprmethod = type_parser.GetMethod(exprmethod)
                    ?? throw new ArgumentException("no public method '" + exprmethod + "' on " + parser.FullName, nameof(exprmethod));
            }

            /// <summary>
            /// Cache key for a lexer/parser/entry-rule combination.
            /// </summary>
            public static string CalcHashString(Type lexer, Type parser, string exprmethod)
            {
                return lexer.FullName + ":" + parser.FullName + ":" + exprmethod;
            }

            /// <summary>
            /// Creates a parser instance reading tokens from the given lexer.
            /// </summary>
            public Parser GetAntlrParser(Lexer lex)
            {
                CommonTokenStream commonTokenStream = new CommonTokenStream(lex);
                return Activator.CreateInstance(type_parser, commonTokenStream) as Parser;
            }

            /// <summary>
            /// Creates a lexer instance over the given source text.
            /// </summary>
            public Lexer GetAntlrLexer(string text)
            {
                AntlrInputStream antlrInputStream = new AntlrInputStream(text);
                return Activator.CreateInstance(type_lexer, antlrInputStream) as Lexer;
            }

            /// <summary>
            /// Parses <paramref name="text"/> and wraps the resulting ANTLR tree in an stNode.
            /// </summary>
            public stNode ParseAst(string text, bool dumpSrc, bool dumpLex, bool dumpAst)
            {
                RuleContext antlrnode = Parse(text, dumpSrc, dumpLex, dumpAst);
                // Fresh lexer: the one used inside Parse() has already been consumed.
                Lexer antlrLexer = GetAntlrLexer(text);
                return stNode.From(rulenames: GetAntlrParser(antlrLexer).RuleNames, antlrnode: antlrnode, tokentypes: antlrLexer.Vocabulary);
            }

            /// <summary>
            /// Runs the entry rule over <paramref name="text"/>, optionally dumping the
            /// source, the token stream, and the AST to the console.
            /// </summary>
            /// <exception cref="Exception">
            /// "had unknown" when an UNKNOWN__ token was seen; also rethrows the parse
            /// exception recorded on the root rule context, if any.
            /// </exception>
            public RuleContext Parse(string text, bool dumpSrc, bool dumpLex, bool dumpAst)
            {
                bool hadUnknown = false;
                Lexer antlrLexer = GetAntlrLexer(text);
                if (dumpSrc)
                {
                    Console.WriteLine("==src==");
                    Console.WriteLine(text);
                }

                if (dumpLex)
                {
                    // Dump from a second lexer so the main one stays unconsumed for parsing.
                    Lexer dumpLexer = GetAntlrLexer(text);
                    Console.WriteLine("==dump lex==");
                    foreach (IToken allToken in dumpLexer.GetAllTokens())
                    {
                        string displayName = antlrLexer.Vocabulary.GetDisplayName(allToken.Type);
                        if (displayName == "UNKNOWN__")
                        {
                            hadUnknown = true;
                        }

                        Console.WriteLine("t=" + displayName + ":" + allToken.Text + "(" + allToken.Line + "," + allToken.Column + ")");
                    }
                }

                // NOTE(review): unknown-token detection only runs inside the dumpLex branch,
                // so Parse(text, _, false, _) can never throw "had unknown" — confirm intended.
                Parser antlrParser = GetAntlrParser(antlrLexer);
                ParserRuleContext parserRuleContext = exprmethod.Invoke(antlrParser, null) as ParserRuleContext;
                if (dumpAst)
                {
                    Console.WriteLine("==dump ast==");
                    DumpAst(antlrLexer, antlrParser, parserRuleContext);
                }

                if (parserRuleContext.exception != null)
                {
                    Console.WriteLine("==err at:(" + parserRuleContext.exception.OffendingToken.Line + "," + parserRuleContext.exception.OffendingToken.Column + ")" + parserRuleContext.ToString());
                    throw parserRuleContext.exception;
                }

                if (hadUnknown)
                {
                    throw new Exception("had unknown");
                }

                return parserRuleContext;
            }
        }

        // Cache of ParserInfo instances keyed by ParserInfo.CalcHashString.
        private static Dictionary<string, ParserInfo> parsers = new Dictionary<string, ParserInfo>();

        /// <summary>
        /// Gets (and caches) the parser info for a lexer/parser pair and entry rule.
        /// </summary>
        /// <typeparam name="T_LEX">ANTLR-generated lexer type.</typeparam>
        /// <typeparam name="T_PARSER">ANTLR-generated parser type.</typeparam>
        /// <param name="exprmethod">Name of the parser's entry-rule method.</param>
        /// <returns>The cached or freshly created <see cref="ParserInfo"/>.</returns>
        public static ParserInfo GetParserInfo<T_LEX, T_PARSER>(string exprmethod) where T_LEX : Lexer where T_PARSER : Parser
        {
            string key = ParserInfo.CalcHashString(typeof(T_LEX), typeof(T_PARSER), exprmethod);
            // Single hash lookup instead of ContainsKey followed by two indexer accesses.
            if (!parsers.TryGetValue(key, out ParserInfo info))
            {
                info = new ParserInfo(typeof(T_LEX), typeof(T_PARSER), exprmethod);
                parsers[key] = info;
            }

            return info;
        }

        /// <summary>
        /// Recursively pretty-prints a parse tree to the console with ASCII branch lines.
        /// </summary>
        /// <param name="lexer">Lexer whose vocabulary names terminal tokens.</param>
        /// <param name="parser">Parser whose RuleNames label rule nodes.</param>
        /// <param name="node">Node to print (rule context or terminal).</param>
        /// <param name="deep">Recursion depth; 0 at the root.</param>
        /// <param name="linestyle">Per-depth connector state: 0 = no line, 1 = "|" column, 2 = last child pending.</param>
        private static void DumpAst(Lexer lexer, Parser parser, IParseTree node, int deep = 0, List<int> linestyle = null)
        {
            // Build the indentation/connector prefix for this depth.
            // (StringBuilder avoids O(depth^2) string concatenation.)
            StringBuilder prefix = new StringBuilder();
            for (int i = 0; i < deep; i++)
            {
                prefix.Append(' ');
                if (linestyle[i] > 0)
                {
                    if (i == deep - 1)
                    {
                        if (linestyle[i] == 1)
                        {
                            prefix.Append("|--");
                        }
                        else if (linestyle[i] == 2)
                        {
                            prefix.Append("\\--");
                            linestyle[i] = 0; // last child reached: stop drawing this column below
                        }
                    }
                    else
                    {
                        prefix.Append("|  ");
                    }
                }
                else
                {
                    prefix.Append("   ");
                }
            }

            string str = prefix.ToString();
            RuleContext ruleContext = node as RuleContext;
            if (ruleContext == null)
            {
                // Leaf node: print "TOKENNAME:text" for terminals, raw text otherwise.
                ITerminalNode terminalNode = node as ITerminalNode;
                if (terminalNode != null)
                {
                    string displayName = lexer.Vocabulary.GetDisplayName(terminalNode.Symbol.Type);
                    Console.WriteLine(str + displayName + ":" + terminalNode.Symbol.Text);
                }
                else
                {
                    Console.WriteLine(str + node.GetText());
                }

                return;
            }

            if (deep == 0)
            {
                linestyle = new List<int>();
            }

            // Multi-child nodes get a "|" column so siblings are visually connected.
            linestyle.Add((node.ChildCount > 1) ? 1 : 0);
            string ruleName = parser.RuleNames[ruleContext.RuleIndex];
            Console.WriteLine(str + ruleName);
            for (int j = 0; j < node.ChildCount; j++)
            {
                IParseTree child = node.GetChild(j);
                // Mark the final child so it is drawn with "\--" instead of "|--".
                if (j == node.ChildCount - 1 && linestyle[deep] == 1)
                {
                    linestyle[deep] = 2;
                }

                DumpAst(lexer, parser, child, deep + 1, linestyle);
            }

            linestyle.RemoveAt(deep);
        }


        /// <summary>
        /// Collects the plain C header files (*.h) under a project directory,
        /// skipping headers that look like C++.
        /// </summary>
        /// <param name="projPath">Project directory path, searched recursively.</param>
        /// <param name="outFileDatas">Receives interleaved pairs: file path followed by file content.</param>
        /// <returns>false when <paramref name="projPath"/> does not exist; otherwise true.</returns>
        public static bool GetCHeadFiles(string projPath, List<string> outFileDatas)
        {
            if (!Directory.Exists(projPath)) return false;
            // Every .h file in the tree.
            var files = Directory.GetFiles(projPath, "*.h", SearchOption.AllDirectories);

            foreach (var item in files)
            {
                string codeStr = File.ReadAllText(item);
                // Heuristic C++ detection: skip headers mentioning "namespace" or "inline".
                if (codeStr.Contains("namespace")) continue;
                if (codeStr.Contains("inline")) continue;
                outFileDatas.Add(item);
                outFileDatas.Add(codeStr);
            }
            return true;
        }


        /// <summary>
        /// Builds a semantic analyzer for a single file.
        /// </summary>
        /// <param name="filePath">Path of the source file to parse.</param>
        /// <returns>A <see cref="semanticAnalyzer"/> over the file's AST.</returns>
        public static semanticAnalyzer getSemAnaly(string filePath)
        {
            var info = libC.tools.GetParserInfo<WrapLexer, WrapParser>("prog");
            string source = File.ReadAllText(filePath);
            // Parse with all console dumps enabled (src / lex / ast).
            stNode tree = info.ParseAst(source, true, true, true);
            var astByFile = new Dictionary<string, stNode>
            {
                [filePath] = tree
            };
            return new semanticAnalyzer(astByFile);
        }

        /// <summary>
        /// Builds a semantic analyzer for every C header file in a project directory.
        /// Files that fail to parse are logged and skipped.
        /// </summary>
        /// <param name="ProjPath">Project directory path.</param>
        /// <returns>The analyzer, or null when <paramref name="ProjPath"/> does not exist.</returns>
        public static semanticAnalyzer getSemAnalyByProj(string ProjPath)
        {
            var parser = libC.tools.GetParserInfo<WrapLexer, WrapParser>("prog");
            var files = new List<string>();
            var isOk = tools.GetCHeadFiles(ProjPath, files);
            if (!isOk) return null;

            var _dic = new Dictionary<string, stNode>();
            // GetCHeadFiles interleaves [path, content, path, content, ...];
            // the "i + 1 < Count" bound also guards against a malformed odd-length list.
            for (int i = 0; i + 1 < files.Count; i += 2)
            {
                var fileP = files[i];
                var expr = files[i + 1];
                Console.WriteLine($"Parsing file : {fileP}");
                stNode ast = null;
                try
                {
                    ast = parser.ParseAst(expr, false, false, false);
                }
                catch (Exception err)
                {
                    // Best-effort: a file that fails to parse is skipped, not fatal.
                    Console.WriteLine($"解析异常 跳过:{fileP} \n error : {err.Message}");
                }
                if (ast != null)
                {
                    _dic[fileP] = ast;
                }
            }

            var semAnaly = new semanticAnalyzer(_dic);
            return semAnaly;
        }
    }
}
