﻿using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Text;


namespace Teal.Compiler {

    /// <summary>
    /// 表示一个语法解析器。
    /// </summary>
    public sealed class Parser {

        #region 公开接口

        /// <summary>
        /// Gets or sets the lexer used by this parser. All parse entry points
        /// feed their input through this instance.
        /// </summary>
        public Lexer lexer = new Lexer();

        /// <summary>
        /// Parses the given source.
        /// </summary>
        /// <param name="source">The source to parse.</param>
        /// <returns>The root node of the resulting syntax tree.</returns>
        public SourceUnit parse(TextReader source) {
            lexer.input = source;
            var result = new SourceUnit();
            parseSourceUnitBody(result);
            return result;
        }

        /// <summary>
        /// Parses the given source, recording <paramref name="sourcePath"/> as the
        /// location reported in diagnostics.
        /// </summary>
        /// <param name="source">The source to parse.</param>
        /// <param name="sourcePath">The path of the source, used for locations.</param>
        /// <returns>The root node of the resulting syntax tree.</returns>
        public SourceUnit parse(TextReader source, string sourcePath) {
            // Set the location first, then delegate; this preserves the original
            // statement order (location before input) and removes the duplicated
            // body — the same pattern parseFile() already uses.
            lexer.currentLocation = new Location(sourcePath);
            return parse(source);
        }

        /// <summary>
        /// Parses the given code snippet.
        /// </summary>
        /// <param name="souceCode">The source code to parse.</param>
        /// <returns>The root node of the resulting syntax tree.</returns>
        public SourceUnit parseString(string souceCode) {
            var reader = new StringReader(souceCode);
            return parse(reader);
        }

        /// <summary>
        /// Parses the given source file.
        /// </summary>
        /// <param name="fileName">The source file to parse.</param>
        /// <param name="encoding">The encoding to use; when null, the encoding is
        /// detected from the file's byte order marks.</param>
        /// <returns>The root node of the resulting syntax tree.</returns>
        public SourceUnit parseFile(string fileName, Encoding encoding = null) {
            lexer.currentLocation = new Location(fileName);
            StreamReader reader = encoding != null
                ? new StreamReader(fileName, encoding)
                : new StreamReader(fileName, true);
            using (reader) {
                return parse(reader);
            }
        }

        /// <summary>
        /// Parses the source downloaded from the given address.
        /// </summary>
        /// <param name="uri">The address of the source to parse.</param>
        /// <param name="encoding">The encoding to use; when null, the encoding is
        /// detected from the response's byte order marks.</param>
        /// <returns>The root node of the resulting syntax tree.</returns>
        public SourceUnit parseUri(Uri uri, Encoding encoding = null) {
            lexer.currentLocation = new Location(uri.ToString());
            // WebRequest.Create is the static being called (HttpWebRequest merely
            // inherits it); spell it that way for clarity.
            var request = System.Net.WebRequest.Create(uri);
            // The previous code leaked the WebResponse and its stream. Parsing
            // consumes the whole input before parseStream returns, so disposing
            // afterwards is safe.
            using (var response = request.GetResponse())
            using (var stream = response.GetResponseStream()) {
                return parseStream(stream, encoding);
            }
        }

        /// <summary>
        /// Parses the source read from the given stream.
        /// </summary>
        /// <param name="stream">The stream containing the source to parse.</param>
        /// <param name="encoding">The encoding to use; when null, the encoding is
        /// detected from the stream's byte order marks.</param>
        /// <returns>The root node of the resulting syntax tree.</returns>
        public SourceUnit parseStream(Stream stream, Encoding encoding = null) {
            StreamReader reader;
            if (encoding != null) {
                reader = new StreamReader(stream, encoding);
            } else {
                reader = new StreamReader(stream, true);
            }
            return parse(reader);
        }

        /// <summary>
        /// Parses a single expression from the given input.
        /// </summary>
        /// <param name="input">The input to read from.</param>
        /// <returns>The parsed expression.</returns>
        public Expression parseExpression(TextReader input) {
            this.lexer.input = input;
            return this.parseExpression();
        }

        /// <summary>
        /// Parses a single statement from the given input.
        /// </summary>
        /// <param name="input">The input to read from.</param>
        /// <returns>The parsed statement.</returns>
        public Statement parseStatement(TextReader input) {
            this.lexer.input = input;
            return this.parseStatement();
        }

        #endregion

        #region 解析内部

        /// <summary>
        /// Consumes the next token if its type equals <paramref name="token"/>.
        /// </summary>
        /// <param name="token">The token type to test for.</param>
        /// <returns>True if the token matched and was consumed; otherwise false.</returns>
        private bool readToken(TokenType token) {
            if (lexer.peek().type != token) {
                return false;
            }
            lexer.read();
            return true;
        }

        /// <summary>
        /// Consumes the next token, reporting an error with <paramref name="errorCode"/>
        /// if its type is not <paramref name="token"/> (in which case nothing is consumed).
        /// </summary>
        /// <param name="token">The expected token type.</param>
        /// <param name="errorCode">The error code used when reporting a mismatch.</param>
        private void expectToken(TokenType token, ErrorCode errorCode) {
            if (!readToken(token)) {
                Compiler.error(errorCode, String.Format("语法错误：应输入“{0}”", token.getName()), lexer.peek());
            }
        }

        /// <summary>
        /// Reads an identifier, reporting an error when the next token is not one.
        /// </summary>
        /// <returns>The identifier read, or an empty identifier on failure.</returns>
        private Identifier expectIdentifier() {
            var next = lexer.peek();

            if (next.type == TokenType.identifier) {
                return parseIdentifier();
            }

            if (next.type.isKeyword()) {
                // Keywords can still be consumed as identifiers after reporting,
                // which keeps the parser in sync with the input.
                Compiler.error(ErrorCode.expectedIdentifier, String.Format("语法错误：应输入标识符；“{0}”是关键字，请改为“${0}”", next.type.getName()), next);
                return parseIdentifier();
            }

            Compiler.error(ErrorCode.expectedIdentifier, "语法错误：应输入标识符", next);
            return new Identifier() {
                value = String.Empty
            };
        }

        /// <summary>
        /// Reads a semicolon. When the semicolon is missing, either reports a strict-mode
        /// error or allows automatic insertion before a line break, “}” or end of file.
        /// </summary>
        private void expectSemicolon() {
            if (!readToken(TokenType.semicolon)) {
                if (Compiler.options.disallowMissingSemicolons) {
                    // Strict mode: a semicolon is always required.
                    Compiler.error(ErrorCode.strictExpectedSemicolon, "严格模式：应输入“;”", lexer.current.endLocation, lexer.current.endLocation);
                } else if (!lexer.peek().hasLineTerminatorBeforeStart && lexer.peek().type != TokenType.rBrace && lexer.peek().type != TokenType.eof) {
                    // Automatic insertion only applies at a line break, “}” or EOF.
                    Compiler.error(ErrorCode.expectedSemicolon, "语法错误：应输入“;”或换行", lexer.current.endLocation, lexer.current.endLocation);
                }
            }
        }

        /// <summary>
        /// Reads a “{”, reporting an error when the next token is not one.
        /// </summary>
        /// <returns>True if a “{” was consumed; otherwise false.</returns>
        private bool expectLBrace() {
            if (readToken(TokenType.lBrace)) {
                return true;
            }
            Compiler.error(ErrorCode.expectedLBrace, "语法错误：应输入“{”", lexer.peek());
            return false;
        }

        /// <summary>
        /// Determines whether the given token is an identifier whose text equals
        /// <paramref name="value"/>, comparing the token buffer character by character.
        /// </summary>
        /// <param name="token">The token to inspect.</param>
        /// <param name="value">The identifier text to compare against.</param>
        /// <returns>True when the token is exactly that identifier.</returns>
        private static bool checkIdentifier(Token token, string value) {
            if (token.type != TokenType.identifier) {
                return false;
            }
            if (token.buffer.Length != value.Length) {
                return false;
            }
            // Compare from the end; direction does not affect the result.
            int i = value.Length;
            while (--i >= 0) {
                if (token.buffer[i] != value[i]) {
                    return false;
                }
            }
            return true;
        }

        /// <summary>
        /// Converts an already-parsed expression into an identifier, reporting an
        /// error when the expression is not one.
        /// </summary>
        /// <param name="value">The expression to convert.</param>
        /// <returns>The identifier, or a placeholder built from the expression text.</returns>
        private static Identifier toIdentifier(Expression value) {
            if (value is Identifier) {
                return (Identifier)value;
            }

            // Expression.empty means no expression was parsed at all; the error has
            // already been reported during expression parsing, so stay silent here.
            if (value != Expression.empty) {
                var predefined = value as PredefinedTypeLiteral;
                if (predefined != null) {
                    Compiler.error(ErrorCode.expectedIdentifier, String.Format("语法错误：应输入标识符；“{0}”是关键字，请改用“${0}”", predefined.type.getName()), value);
                } else {
                    Compiler.error(ErrorCode.expectedIdentifier, "语法错误：应输入标识符", value);
                }
            }

            return new Identifier() {
                value = value.ToString()
            };
        }

        /// <summary>
        /// Error recovery: skips all tokens belonging to the current (broken) member
        /// so parsing can resume at the next member definition.
        /// </summary>
        private void skipToMemberDefinition() {
            // #todo improve me
            do {
                lexer.read();

                // Stop once the next token could legally start a global member.
                if (lexer.peek().type.isUsedInGlobal()) {
                    // A predefined type not preceded by “}” presumably still belongs
                    // to the broken member (e.g. a return type), so keep skipping
                    // — TODO confirm against callers.
                    if (lexer.peek().type.isPredefinedType() && lexer.current.type != TokenType.rBrace) {
                        continue;
                    }
                    break;
                }

            } while (lexer.peek().type != TokenType.eof);
        }

        /// <summary>
        /// Error recovery: consumes at least one token, then keeps skipping until the
        /// end of file or the first token that starts on a new line.
        /// </summary>
        private void skipToNextLine() {
            lexer.read();
            while (lexer.peek().type != TokenType.eof && !lexer.peek().hasLineTerminatorBeforeStart) {
                lexer.read();
            }
        }

        #endregion

        #region 解析成员

        /// <summary>
        /// Entry point of all parsing: fills <paramref name="target"/> from the
        /// current lexer input.
        /// </summary>
        /// <param name="target">The source unit to populate.</param>
        private void parseSourceUnitBody(SourceUnit target) {

            // SourceUnit :
            //   ImportDirectiveList? MemberDefinitionList?

            // ImportDirectiveList :
            //   ImportDirective ...

            // Import directives may only appear at the top of the file.
            target.importDirectives = parseImportDirectiveList();

            // The rest of the file is member definitions; no closing “}” is
            // expected at the end of a source unit.
            parseMemberContainerDefinitionBody(target, false);

        }

        /// <summary>
        /// Parses the import directives at the top of a source unit.
        /// </summary>
        /// <returns>The head of a singly linked list of directives, or null if none.</returns>
        private ImportDirective parseImportDirectiveList() {

            // ImportDirective :
            //   import Type ;
            //   import Identifier = Type ;
            //   import Type => Identifier ;

            ImportDirective first = null, last = null;

            while (readToken(TokenType.import)) {

                var current = new ImportDirective();

                current.value = parseType();
                switch (lexer.peek().type) {
                    case TokenType.assign:
                        // “import Alias = Type;” — what was just parsed is actually
                        // the alias; parse the real type after the “=”.
                        lexer.read();
                        current.alias = toIdentifier(current.value);
                        current.value = parseType();
                        break;
                    case TokenType.assignTo:
                        // “import Type => Alias;”
                        lexer.read();
                        current.alias = expectIdentifier();
                        break;
                }

                expectSemicolon();

                // Append to the linked list of directives.
                if (first == null) {
                    last = first = current;
                } else {
                    last = last.next = current;
                }
            }

            return first;

        }

        /// <summary>
        /// Parses the member definitions inside a container (source unit, class,
        /// struct, namespace, …) until the closing “}” or end of file.
        /// </summary>
        /// <param name="target">The container receiving the parsed members.</param>
        /// <param name="expectRBrack">True when the body must be terminated by “}”
        /// (i.e. target is not a source unit).</param>
        private void parseMemberContainerDefinitionBody(MemberContainerDefinition target, bool expectRBrack) {

            // MemberDefinitionList :
            //   MemberDefinition ...

            // MemberDefinition :
            //   FieldDefinition
            //   AliasDefinition
            //   PropertyDefinition
            //   OperatorOverloadDefinition
            //   IndexerDefinition
            //   MethodDefinition
            //   ConstructorDefinition
            //   DeconstructorDefinition
            //   TypeDefinition 
            //   NamespaceDefinition 
            //   ExtensionDefinition 

            // TypeDefinition :
            //   ClassDefinition
            //   StructDefinition
            //   EnumDefinition
            //   InterfaceDefinition

            MemberDefinition last = null;
            while (true) {

                MemberDefinition current;
                Expression returnType;
                // Every member may be preceded by a doc comment, annotations
                // (@Type(...)) and modifiers, in that order.
                var docComment = parseDocComment();
                var annotations = parseMemberAnnotationList();
                var modifiers = parseModifiers();
                var type = lexer.peek().type;

                // int xxx...
                if (type.isPredefinedType()) {
                    returnType = parsePredefinedType();
                    goto parseTypeMember;
                }

                switch (type) {

                    #region 标识符
                    case TokenType.identifier:

                        var currentIdentifier = parseIdentifier();

                        // A() — an identifier directly followed by “(” can only be
                        // a constructor.
                        if (lexer.peek().type == TokenType.lParam) {
                            current = parseConstructor(docComment, annotations, modifiers, currentIdentifier);
                            goto parseSuccess;
                        }

                        // Otherwise the identifier starts the member's return type.
                        returnType = parseType(currentIdentifier, TypeUsage.type);
                        goto parseTypeMember;

                    #endregion

                    #region 关键字开头的成员定义

                    case TokenType.@namespace:
                        current = parseNamespaceDefinition(docComment, annotations, modifiers);
                        goto parseSuccess;
                    case TokenType.@class:
                        current = parseClassDefinition(docComment, annotations, modifiers);
                        goto parseSuccess;
                    case TokenType.@struct:
                        current = parseStructDefinition(docComment, annotations, modifiers);
                        goto parseSuccess;
                    case TokenType.@interface:
                        current = parseInterfaceDefinition(docComment, annotations, modifiers);
                        goto parseSuccess;
                    case TokenType.@enum:
                        current = parseEnumDefinition(docComment, annotations, modifiers);
                        goto parseSuccess;
                    case TokenType.extend:
                        current = parseExtensionDefinition(docComment, annotations, modifiers);
                        goto parseSuccess;
                    case TokenType.func:
                        current = parseFuncDefinition(docComment, annotations, modifiers);
                        goto parseSuccess;

                    #endregion

                    #region 结束符

                    case TokenType.rBrace:
                        lexer.read();
                        // A “}” legally ends a nested container; at the top level it
                        // is reported as redundant and skipped.
                        if (expectRBrack) {
                            return;
                        }
                        Compiler.error(ErrorCode.unexpectedRBrace, "语法错误：多余的“}”", lexer.current);
                        continue;
                    case TokenType.eof:
                        // EOF inside a nested container means a “}” is missing.
                        if (expectRBrack) {
                            expectToken(TokenType.rBrace, ErrorCode.expectedRBrace);
                        }
                        return;

                    #endregion

                    #region 错误

                    case TokenType.import:
                        Compiler.error(ErrorCode.unexpectedImportDirective, "“import”指令只能在文件顶部使用", lexer.peek());
                        // Skip everything up to the next member so the misplaced
                        // import statements are ignored.
                        skipToMemberDefinition();
                        continue;
                    case TokenType.semicolon:
                        Compiler.error(ErrorCode.unexpectedSemicolon, "语法错误：多余的“;”", lexer.peek());
                        lexer.read();
                        continue;
                    default:
                        Compiler.error(ErrorCode.unexpectedStatement, "语法错误：应输入函数、类或其它成员定义；所有语句都应放在函数内", lexer.peek());
                        skipToMemberDefinition();
                        continue;

                    #endregion

                }

            parseTypeMember:

                // Explicit interface qualification accumulated so far
                // (e.g. “Type IInterface.name”).
                Expression explicitType = null;

            parseNextTypeMember:

                switch (lexer.peek().type) {

                    #region Type name
                    case TokenType.identifier:

                        Identifier currentIdentifier = parseIdentifier();
                        switch (lexer.peek().type) {

                            // Type name()
                            case TokenType.lParam:
                                current = parseMethodDefinition(docComment, annotations, modifiers, returnType, explicitType, currentIdentifier);
                                goto parseSuccess;

                            // Type name {get; set;}
                            case TokenType.lBrace:
                                current = parsePropertyDefinition(docComment, annotations, modifiers, returnType, explicitType, currentIdentifier);
                                goto parseSuccess;

                            // Type InterfaceType.name() — fold the identifier into
                            // the explicit interface qualifier and keep scanning.
                            case TokenType.period:
                                explicitType = explicitType == null ? (Expression)currentIdentifier : new MemberCallExpression() {
                                    target = explicitType,
                                    argument = currentIdentifier
                                };
                                lexer.read();
                                goto parseNextTypeMember;

                            // Type name<T>() — “<” is ambiguous: it may start the
                            // method's generic parameters or a constructed generic
                            // type inside an explicit interface qualifier.
                            case TokenType.lt:
                                if (followsWithTypeMemberDefinition()) {
                                    var currentType = parseGenericTypeExpression(currentIdentifier, TypeUsage.type);
                                    explicitType = explicitType == null ? (Expression)currentType : new MemberCallExpression() {
                                        target = explicitType,
                                        argument = currentType
                                    };
                                    lexer.read();
                                    goto parseNextTypeMember;
                                }
                                current = parseMethodDefinition(docComment, annotations, modifiers, returnType, explicitType, currentIdentifier);
                                goto parseSuccess;

                            // Type name;
                            // Type name = Value;
                            // Type name, name2;
                            default:
                                current = parseFieldDefinition(docComment, annotations, modifiers, returnType, explicitType, currentIdentifier);
                                goto parseSuccess;

                        }

                    #endregion

                    #region Type this
                    case TokenType.@this:
                        lexer.read();

                        // Type this [params] {}
                        if (lexer.peek().type == TokenType.lBrack) {
                            current = parseIndexerOperatorDefinition(docComment, annotations, modifiers, returnType, explicitType);
                            goto parseSuccess;
                        }

                        // Type this +(params) {}
                        if (lexer.peek().type.isOverloadableOperator()) {
                            current = parseOperatorOverloadDefinition(docComment, annotations, modifiers, returnType, explicitType);
                            goto parseSuccess;
                        }

                        Compiler.error(ErrorCode.invalidOperatorOverload, String.Format("“{0}”不是可重载的操作符", lexer.peek().type.getName()), lexer.peek());
                        skipToMemberDefinition();
                        continue;
                    #endregion

                    // Anything else: report a missing identifier and resynchronize.
                    default:
                        expectIdentifier();
                        skipToMemberDefinition();
                        continue;

                }

            parseSuccess:
                // Append the parsed member to the container's linked list.
                // NOTE(review): current is definitely assigned on every path that
                // reaches this label, so the null guard below looks redundant, and
                // the first branch would store a null head if it ever were null —
                // confirm before simplifying.
                if (target.members == null) {
                    last = target.members = current;
                } else if (current != null) {
                    last = last.next = current;
                }

            }

        }

        /// <summary>
        /// Looks ahead (without consuming tokens) to decide how to interpret a “&lt;”
        /// after a member name: returns true when it belongs to an explicit interface
        /// qualifier or a nested generic, i.e. a member name still follows.
        /// </summary>
        /// <returns>True when the scan indicates a type member definition follows.</returns>
        private bool followsWithTypeMemberDefinition() {
            // Presumably mark() starts speculative reading and markRead() consumes
            // from the speculative stream without affecting the real position —
            // TODO confirm against Lexer. The first markRead() skips the “&lt;” itself.
            lexer.mark();
            lexer.markRead();

            // Scan past the would-be generic argument list.
            while (true) {
                switch (lexer.markRead().type) {
                    case TokenType.gt:

                        // A “.” right after “>” means this was a constructed generic
                        // type (an explicit interface qualifier segment).
                        return lexer.markRead().type == TokenType.period;
                    case TokenType.lt:
                        return true;
                    case TokenType.colon:
                    case TokenType.eof:
                        return false;
                }
            }
        }

        /// <summary>
        /// Returns the documentation comment attached to the next token, if any.
        /// Consumes nothing.
        /// </summary>
        private DocComment parseDocComment() {
            var next = lexer.peek();
            return next.docComment;
        }

        /// <summary>
        /// Parses the annotations (“@Type(...)”) preceding a member definition.
        /// </summary>
        /// <returns>The head of a singly linked list of annotations, or null if none.</returns>
        private MemberDefinition.MemberAnnotation parseMemberAnnotationList() {

            // MemberAnnotationList :
            //   MemberDefinition.MemberAnnotation ...

            // MemberDefinition.MemberAnnotation :
            //   @ Type FuncCallArguments?

            MemberDefinition.MemberAnnotation first = null, last = null;

            int count = 0;

            while (readToken(TokenType.at)) {

                var current = new MemberDefinition.MemberAnnotation();

                current.target = parseType();
                // Arguments are optional: “@Type” alone is legal.
                if (lexer.peek().type == TokenType.lParam) {
                    current.arguments = parseArgumentList(TokenType.rParam, ErrorCode.expectedRParam);
                }

                // Append to the linked list of annotations.
                if (first == null) {
                    last = first = current;
                } else {
                    last = last.next = current;
                }

                // Hard limit to keep malformed input from looping unboundedly.
                if (++count > 250) {
                    Compiler.error(ErrorCode.tooManyAnnoatation, "注解太多；一个成员最多只能包含 250 个注解", lexer.current);
                }

            }

            return first;

        }

        /// <summary>
        /// Parses the modifier keywords preceding a member and combines them into a
        /// <see cref="Modifiers"/> flag set, reporting duplicates and conflicting
        /// accessibility modifiers.
        /// </summary>
        /// <returns>The combined modifier flags (Modifiers.none if no modifier appears).</returns>
        private Modifiers parseModifiers() {
            Modifiers result = Modifiers.none;

            while (lexer.peek().type.isModifier()) {
                Modifiers current;
                // Map the token to its flag; isModifier() guarantees one of these.
                switch (lexer.read().type) {
                    case TokenType.@static:
                        current = Modifiers.@static;
                        break;
                    case TokenType.@virtual:
                        current = Modifiers.@virtual;
                        break;
                    case TokenType.@override:
                        current = Modifiers.@override;
                        break;
                    case TokenType.@abstract:
                        current = Modifiers.@abstract;
                        break;

                    case TokenType.@private:
                        current = Modifiers.@private;
                        break;
                    case TokenType.@public:
                        current = Modifiers.@public;
                        break;
                    case TokenType.@protected:
                        current = Modifiers.@protected;
                        break;

                    case TokenType.@new:
                        current = Modifiers.@new;
                        break;
                    case TokenType.@const:
                        current = Modifiers.@const;
                        break;
                    case TokenType.final:
                        current = Modifiers.final;
                        break;
                    case TokenType.@extern:
                        current = Modifiers.@extern;
                        break;
                    case TokenType.@volatile:
                        current = Modifiers.@volatile;
                        break;
                    default:
                        Debug.Assert(false, "TokenType.isModifier() 返回错误的结果");
                        throw new Unreachable();
                }

                // Each modifier may appear at most once.
                if (result.hasFlag(current)) {
                    Compiler.error(ErrorCode.dumpModifiers, String.Format("“{0}”修饰符重复；应删除“{0}”", current.getName()), lexer.current);
                    continue;
                }

                // Only one accessibility modifier (public/private/protected) is allowed.
                if (result.getAccessibility() != Modifiers.none && current.getAccessibility() != Modifiers.none) {
                    Compiler.error(ErrorCode.tooManyAccessibility, String.Format("访问修饰符太多；应删除“{0}”", current.getName()), lexer.current);
                    continue;
                }

                result |= current;

            }

            return result;
        }

        /// <summary>
        /// Parses a method definition from the point right after its name.
        /// </summary>
        /// <param name="docComment">The documentation comment preceding the member.</param>
        /// <param name="annotations">The member's annotations, if any.</param>
        /// <param name="modifiers">The member's modifiers.</param>
        /// <param name="returnType">The already-parsed return type.</param>
        /// <param name="explicitType">The explicit interface qualifier, or null.</param>
        /// <param name="name">The method name.</param>
        /// <returns>The parsed method definition.</returns>
        private MethodDefinition parseMethodDefinition(DocComment docComment, MemberDefinition.MemberAnnotation annotations, Modifiers modifiers, Expression returnType, Expression explicitType, Identifier name) {

            // MethodDefinition :
            //   Annotations? Modifiers? Type ExplicitType? Identifier GenericParameterList? ( ParameterList? ) MethodBody

            // MethodBody :
            //   ;
            //   Block

            var result = new MethodDefinition() {
                docComment = docComment,
                annotations = annotations,
                modifiers = modifiers,
                returnType = returnType,
                explicitType = explicitType,
                name = name
            };

            // An optional generic parameter list follows the name.
            if (readToken(TokenType.lt)) {
                result.genericParameters = parseGenericParameterList();
            }

            result.parameters = parseParameterList(TokenType.lParam, TokenType.rParam, ErrorCode.expectedRParam);
            result.body = parseMethodBody();
            return result;

        }

        /// <summary>
        /// Parses a generic parameter list from the point right after the opening “&lt;”,
        /// including optional constraints, and consumes the closing “&gt;”.
        /// </summary>
        /// <returns>The head of a singly linked list of generic parameters.</returns>
        private Teal.Compiler.MemberDefinition.GenericParameter parseGenericParameterList() {

            // GenericParameterList :
            //   GenericParameter
            //   GenericParameterList , GenericParameter

            // GenericParameter :
            //   Identifier
            //   Identifier : TypeConstract
            //   ...

            // TypeConstract :
            //   Type
            //   ( TypeList? )

            Teal.Compiler.MemberDefinition.GenericParameter first = null, last = null;

            int count = 0;

            do {
                var current = new Teal.Compiler.MemberDefinition.GenericParameter();

                // “...” declares a variadic parameter; it has no name or constraints.
                if (!readToken(TokenType.ellipsis)) {
                    current.name = expectIdentifier();

                    // “:” introduces either a single constraint or a parenthesized
                    // constraint list.
                    if (readToken(TokenType.colon)) {
                        current.constraints = new List<Expression>();
                        bool hasParam = readToken(TokenType.lParam);
                        int j = 0;
                        do {
                            Expression type;

                            switch (lexer.peek().type) {
                                case TokenType.@class:
                                    type = new MemberDefinition.GenericParameter.ClassConstraintExpression() {
                                        startLocation = lexer.read().startLocation
                                    };
                                    break;
                                case TokenType.@struct:
                                    type = new MemberDefinition.GenericParameter.StructConstraintExpression() {
                                        startLocation = lexer.read().startLocation
                                    };
                                    break;
                                case TokenType.@enum:
                                    type = new MemberDefinition.GenericParameter.EnumConstraintExpression() {
                                        startLocation = lexer.read().startLocation
                                    };
                                    break;
                                case TokenType.@new:
                                    // “new()” constraint: the parentheses are required.
                                    type = new MemberDefinition.GenericParameter.NewableConstraintExpression() {
                                        startLocation = lexer.read().startLocation,
                                    };
                                    expectToken(TokenType.lParam, ErrorCode.expectedLParam);
                                    expectToken(TokenType.rParam, ErrorCode.expectedRParam);
                                    type.endLocation = lexer.current.endLocation;
                                    break;
                                case TokenType.rParam:
                                    // NOTE(review): this jump leaves the “)” unconsumed
                                    // and also skips the expectToken(rParam) below, so
                                    // the closing “)” appears to remain in the stream —
                                    // confirm whether that is intended.
                                    goto end;
                                default:
                                    type = parseType();
                                    break;
                            }

                            current.constraints.Add(type);

                            // Without parentheses exactly one constraint is allowed.
                            if (!hasParam) {
                                goto end;
                            }

                            // Hard limit to keep malformed input from looping unboundedly.
                            if (++j > 250) {
                                Compiler.error(ErrorCode.tooManyGenericConstraints, "泛型约束太多；一个泛型参数最多只能包含 250 个约束", lexer.current);
                            }

                        } while (readToken(TokenType.comma));

                        expectToken(TokenType.rParam, ErrorCode.expectedRParam);
                    }

                }

            end:

                // Append the parameter to the linked list.
                if (last == null) {
                    last = first = current;
                } else {
                    last = last.next = current;
                }

                if (++count > 250) {
                    Compiler.error(ErrorCode.tooManyGenericTypeParameters, "泛型参数太多；一个成员最多只能包含 250 个泛型参数", lexer.current);
                }

            } while (readToken(TokenType.comma));

            expectToken(TokenType.gt, ErrorCode.expectedGt);
            return first;

        }

        /// <summary>
        /// Parses a parameter list delimited by <paramref name="startToken"/> and
        /// <paramref name="stopToken"/> (e.g. “(...)” for methods, “[...]” for indexers).
        /// </summary>
        /// <param name="startToken">The expected opening delimiter.</param>
        /// <param name="stopToken">The expected closing delimiter.</param>
        /// <param name="errorCode">The error code used when a delimiter is missing.</param>
        /// <returns>The head of a singly linked list of parameters, or null.</returns>
        private Teal.Compiler.MemberDefinition.Parameter parseParameterList(TokenType startToken, TokenType stopToken, ErrorCode errorCode) {

            // ParameterList :
            //   Parameter
            //   ParameterList , Parameter

            // Parameter :
            //   ParameterModifers? Type Identifier VariableInitializer?
            //   ...

            // ParameterModifers :
            //   ref
            //   out
            //   params

            if (readToken(startToken)) {
                Teal.Compiler.MemberDefinition.Parameter first = null;
                // last is typed as Variable so parameters can be linked via Variable.next.
                Variable last = first;
                do {

                    // Checking for the stop token first also tolerates an empty
                    // list and a trailing comma.
                    if (readToken(stopToken)) {
                        return first;
                    }

                    var current = new Teal.Compiler.MemberDefinition.Parameter();
                    current.variableType = VariableType.inParameter;
                    switch (lexer.peek().type) {
                        // A modifier sets the parameter kind, rejects any further
                        // modifiers, then falls through to the normal “Type name” case.
                        case TokenType.@ref:
                            current.variableType = VariableType.refParameter;
                            lexer.read();
                            parseRestParameterModifiers();
                            goto default;
                        case TokenType.@params:
                            current.variableType = VariableType.paramsParameter;
                            lexer.read();
                            parseRestParameterModifiers();
                            goto default;
                        case TokenType.@out:
                            current.variableType = VariableType.outParameter;
                            lexer.read();
                            parseRestParameterModifiers();
                            goto default;
                        case TokenType.ellipsis:
                            // “...” declares an argument-list parameter with the
                            // literal name "...".
                            current.variableType = VariableType.argListParameter;
                            current.name = new Identifier() {
                                startLocation = lexer.read().startLocation,
                                value = "...",
                                endLocation = lexer.current.endLocation
                            };
                            break;
                        default:
                            current.type = parseType();
                            current.name = expectIdentifier();

                            // Optional default value; only plain (in) parameters may
                            // have one.
                            if (readToken(TokenType.assign)) {
                                current.initialiser = parseExpression();
                                if (current.variableType != VariableType.inParameter) {
                                    Compiler.error(ErrorCode.invalidDefaultParameter, String.Format("含有其它修饰符的参数不允许有默认值"), current.initialiser);
                                }
                            }

                            break;
                    }

                    // Append the parameter to the linked list.
                    if (last == null) {
                        last = first = current;
                    } else {
                        last = last.next = current;
                    }

                } while (readToken(TokenType.comma));

                expectToken(stopToken, errorCode);
                return first;
            }

            // No opening delimiter at all: report it and parse no parameters.
            expectToken(startToken, errorCode);
            return null;

        }

        private void parseRestParameterModifiers() {

            // Consumes any redundant ref/out/params modifiers after the first
            // one, reporting an error for each extra modifier found.
            // Iterative rewrite of the original tail recursion.
            while (true) {
                switch (lexer.peek().type) {
                    case TokenType.@ref:
                    case TokenType.@params:
                    case TokenType.@out:
                        lexer.read();
                        // Fix: name the modifier that was just consumed
                        // (lexer.current) — the original passed lexer.peek(),
                        // which names the token *after* the redundant modifier,
                        // while the error location already used lexer.current.
                        Compiler.error(ErrorCode.tooManyParameterModifiers, String.Format("参数修饰符太多；应删除“{0}”", lexer.current.type.getName()), lexer.current);
                        continue;
                }
                break;
            }
        }

        private ToplevelBlock parseMethodBody() {

            // MethodBody :
            //   Block
            //   ;

            // A method body is either a braced block or a lone semicolon
            // (the body-less form); the latter yields null.
            if (!readToken(TokenType.lBrace)) {
                expectSemicolon();
                return null;
            }

            var body = new ToplevelBlock();
            body.startLocation = lexer.current.startLocation;
            parseBlockBody(body);
            return body;
        }

        private ConstructorDefinition parseConstructor(DocComment docComment, MemberDefinition.MemberAnnotation annotations, Modifiers modifiers, Identifier name) {

            // ConstructorDefinition :
            //   Annotations? Modifiers? Identifier ( ParameterList? ) ConstructorInitializer? Block

            // ConstructorInitializer :
            //   : this ArgumentList
            //   : base ArgumentList

            var definition = new ConstructorDefinition {
                docComment = docComment,
                annotations = annotations,
                modifiers = modifiers,
                name = name
            };
            definition.parameters = parseParameterList(TokenType.lParam, TokenType.rParam, ErrorCode.expectedRParam);

            // Constructor-initializer parsing (": this(...)" / ": base(...)")
            // is currently disabled in this parser:
            //if (readToken(TokenType.colon)) {
            //    if (lexer.peek().type != TokenType.@this && lexer.peek().type != TokenType.@base) {
            //        Compiler.error(ErrorCode.expectedThisOrBase, "语法错误：应输入“this”或“base”", lexer.peek());
            //    } else {
            //        result.initializerType = lexer.read().type;
            //        if (lexer.peek().type == TokenType.lParam) {
            //            result.initializerArguments = parseArgumentList(TokenType.rParam, ErrorCode.expectedRParam);
            //        } else {
            //            expectToken(TokenType.lParam, ErrorCode.expectedLParam);
            //        }
            //    }
            //}

            definition.body = parseMethodBody();

            return definition;
        }

        private PropertyDefinition parsePropertyDefinition(DocComment docComment, MemberDefinition.MemberAnnotation annotations, Modifiers modifiers, Expression returnType, Expression explicitType, Identifier name) {

            // PropertyDefinition :
            //   Annotations? Modifiers? Type ExplicitType? Identifier { PropertyAccessorList }

            // Header pieces were parsed by the caller; only the accessor body
            // ("{ get ... set ... }") remains to be consumed.
            var property = new PropertyDefinition {
                docComment = docComment,
                annotations = annotations,
                modifiers = modifiers,
                returnType = returnType,
                explicitType = explicitType,
                name = name
            };
            parsePropertyBody(property);
            return property;
        }

        private IndexerDefinition parseIndexerOperatorDefinition(DocComment docComment, MemberDefinition.MemberAnnotation annotations, Modifiers modifiers, Expression returnType, Expression explicitType) {

            // IndexerOperatorDefinition :
            //   Annotations? Modifiers? Type this [ ParameterList ] { PropertyAccessorList }

            var result = new IndexerDefinition();
            result.docComment = docComment;
            result.annotations = annotations;
            result.modifiers = modifiers;
            result.returnType = returnType;
            result.explicitType = explicitType;
            // Indexer parameters use brackets rather than parentheses.
            result.parameters = parseParameterList(TokenType.lBrack, TokenType.rBrack, ErrorCode.expectedRBrack);
            parsePropertyBody(result);
            return result;

            // Fix: removed the unreachable `throw new Unreachable();` that
            // followed the return (dead code, compiler warning CS0162).
        }

        private void parsePropertyBody(PropertyOrIndexerDefinition target) {

            // PropertyAccessorList :
            //   get MethodBody
            //   set MethodBody
            //   get MethodBody set MethodBody
            //   set MethodBody get MethodBody

            // Parses "{ accessor ... }" into target.getAccessor / target.setAccessor,
            // reporting duplicated accessors and names other than get/set.
            if (expectLBrace()) {

                do {

                    var current = new PropertyDefinition.PropertyAccessor();
                    current.annotations = parseMemberAnnotationList();
                    current.modifiers = parseAccesibilityModifiers();

                    // Not an identifier at all: report, resynchronize at the
                    // next member definition and give up on this body.
                    if (!readToken(TokenType.identifier)) {
                        Compiler.error(ErrorCode.expectedGetOrSet, "语法错误：应输入“get”或“set”", lexer.peek());
                        skipToMemberDefinition();
                        return;
                    }


                    // NOTE(review): readToken above appears to consume the identifier
                    // and parseIdentifier then appears to build the node from the
                    // already-consumed token — confirm against readToken/parseIdentifier.
                    current.name = parseIdentifier();
                    if (current.name.value == "get") {
                        // Duplicate get accessor is reported but still overwrites.
                        if (target.getAccessor != null) {
                            Compiler.error(ErrorCode.dumpGetOrSet, "get 访问器重复", lexer.current);
                        }
                        target.getAccessor = current;
                    } else if (current.name.value == "set") {
                        // Duplicate set accessor is reported but still overwrites.
                        if (target.setAccessor != null) {
                            Compiler.error(ErrorCode.dumpGetOrSet, "set 访问器重复", lexer.current);
                        }
                        target.setAccessor = current;
                    } else {
                        Compiler.error(ErrorCode.expectedGetOrSet, "语法错误：应输入“get”或“set”", lexer.current);
                    }

                    // Each accessor carries its own method body (block or ;).
                    current.body = parseMethodBody();

                } while (!readToken(TokenType.rBrace));

            }

        }

        private Modifiers parseAccesibilityModifiers() {
            // Consumes a (possibly empty) run of modifier tokens and returns the
            // single accessibility flag (private/public/protected).  Any other
            // modifier is consumed and reported as invalid; a second
            // accessibility modifier is consumed and reported as "too many".
            Modifiers result = Modifiers.none;
            while (lexer.peek().type.isModifier()) {
                Modifiers current;
                switch (lexer.read().type) {
                    case TokenType.@private:
                        current = Modifiers.@private;
                        break;
                    case TokenType.@public:
                        current = Modifiers.@public;
                        break;
                    case TokenType.@protected:
                        current = Modifiers.@protected;
                        break;
                    default:
                        // `continue` here resumes the enclosing while-loop,
                        // skipping the duplicate check below.
                        Compiler.error(ErrorCode.invalidModifiers, String.Format("修饰符“{0}”对该项无效", lexer.current.type.getName()), lexer.current);
                        continue;
                }

                // Only one accessibility modifier may be set; extras are dropped.
                if (result != Modifiers.none) {
                    Compiler.error(ErrorCode.tooManyAccessibility, String.Format("访问修饰符太多；应删除“{0}”", lexer.current.type.getName()), lexer.current);
                    continue;
                }

                result = current;
            }

            return result;
        }

        private OperatorDefinition parseOperatorOverloadDefinition(DocComment docComment, MemberDefinition.MemberAnnotation annotations, Modifiers modifiers, Expression returnType, Expression explicitType) {

            // OperatorOverloadDefinition :
            //   Annotations? Modifiers? Type ExplicitType? OverloadableOperator ( ParameterList ) MethodBody

            var definition = new OperatorDefinition {
                docComment = docComment,
                annotations = annotations,
                modifiers = modifiers,
                returnType = returnType,
                explicitType = explicitType
            };
            // The operator token doubles as the member name; record both the
            // identifier node and the raw token type of the operator.
            definition.name = parseIdentifier(); // this
            definition.@operator = lexer.current.type;
            definition.parameters = parseParameterList(TokenType.lParam, TokenType.rParam, ErrorCode.expectedRParam);
            definition.body = parseMethodBody();
            return definition;
        }

        private FieldDefinition parseFieldDefinition(DocComment docComment, MemberDefinition.MemberAnnotation annotations, Modifiers modifiers, Expression type, Expression explicitType, Identifier currentIdentifier) {

            // FieldDefinition :
            //   Annotations? Modifiers? Type VariableDefinitionList ;

            var result = new FieldDefinition();
            result.docComment = docComment;
            result.annotations = annotations;
            result.modifiers = modifiers;

            // Fields may not explicitly implement an interface member.
            if (explicitType != null) {
                // Fix: message typo 显示 ("display") → 显式 ("explicitly").
                Compiler.error(ErrorCode.invalidExplicitType, "字段不允许显式声明接口", explicitType);
            }

            result.variables = parseVariableList(type, currentIdentifier);
            expectSemicolon();
            return result;

        }

        private MemberDefinition parseFuncDefinition(DocComment docComment, MemberDefinition.MemberAnnotation annotations, Modifiers modifiers) {

            // FuncDefinition :
            //    Annotations? Modifiers? func Identifier ( ParameterList? ) MethodBody

            var result = new MethodDefinition();

            // Fix: the original accepted docComment/annotations/modifiers but
            // never stored them; every sibling parse*Definition assigns them.
            result.docComment = docComment;
            result.annotations = annotations;
            result.modifiers = modifiers;

            lexer.read(); // func
            result.name = expectIdentifier();
            expectToken(TokenType.lParam, ErrorCode.expectedLParam);

            // Untyped parameter list: Identifier ( = Expression )? ( , ... )*
            Variable last = null;
            do {
                if (lexer.peek().type == TokenType.rParam) {
                    break;
                }

                var current = new MethodDefinition.Parameter();
                current.name = expectIdentifier();

                // Optional default value.
                if (readToken(TokenType.assign)) {
                    current.initialiser = parseExpression();
                }

                // Append to the singly-linked parameter list.
                if (last == null) {
                    last = result.parameters = current;
                } else {
                    last = last.next = current;
                }
            } while (readToken(TokenType.comma));

            expectToken(TokenType.rParam, ErrorCode.expectedRParam);
            result.body = parseMethodBody();
            return result;
        }

        private NamespaceDefinition parseNamespaceDefinition(DocComment docComment, MemberDefinition.MemberAnnotation annotations, Modifiers modifiers) {

            // NamespaceDefinition :
            //   Annotations? Modifiers? namespace IdentifierList  { MemberDefinitionList? }

            // NOTE(review): the annotation rejection is deliberately disabled;
            // annotations on a namespace are currently silently dropped (they
            // are never assigned to the result below).
            if (annotations != null) {
                //Compiler.error(ErrorCode.unexpectedAnnotation, "命名空间不允许有注解", annotations);
            }

            // Namespaces may not carry modifiers.
            if (modifiers != Modifiers.none) {
                Compiler.error(ErrorCode.unexpectedModifiers, "命名空间不允许有修饰符", lexer.current);
            }

            var result = new NamespaceDefinition();
            result.docComment = docComment;
            lexer.read(); // namespace

            // A dotted name ("a.b.c") is stored as a list; a simple name leaves
            // result.names null and only result.name set.
            result.name = expectIdentifier();
            if (readToken(TokenType.period)) {
                result.names = new List<Identifier>() { result.name };
                do {
                    result.names.Add(expectIdentifier());
                } while (readToken(TokenType.period));
            }

            if (expectLBrace()) {
                parseMemberContainerDefinitionBody(result, true);
            }

            return result;

        }

        private ClassDefinition parseClassDefinition(DocComment docComment, MemberDefinition.MemberAnnotation annotations, Modifiers modifiers) {

            // ClassDefinition :
            //   Annotations? Modifiers? class Identifier GenericParameterList? BaseTypeList? { MemberDefinitionList? }

            // BaseTypeList :
            //   : TypeList

            // The shared type-definition routine consumes the header and body.
            var definition = new ClassDefinition();
            parseTypeDefinitionBody(definition, docComment, annotations, modifiers);
            return definition;
        }

        private StructDefinition parseStructDefinition(DocComment docComment, MemberDefinition.MemberAnnotation annotations, Modifiers modifiers) {

            // StructDefinition :
            //   Annotations? Modifiers? struct Identifier GenericParameterList? BaseTypeList? { MemberDefinitionList? }

            // The shared type-definition routine consumes the header and body.
            var definition = new StructDefinition();
            parseTypeDefinitionBody(definition, docComment, annotations, modifiers);
            return definition;
        }

        private InterfaceDefinition parseInterfaceDefinition(DocComment docComment, MemberDefinition.MemberAnnotation annotations, Modifiers modifiers) {

            // InterfaceDefinition :
            //  Annotations? Modifiers? interface Identifier GenericParameterList? BaseTypeList? { MemberDefinitionList? }

            // The shared type-definition routine consumes the header and body.
            var definition = new InterfaceDefinition();
            parseTypeDefinitionBody(definition, docComment, annotations, modifiers);
            return definition;
        }

        private void parseTypeDefinitionBody(TypeDefinition target, DocComment docComment, MemberDefinition.MemberAnnotation annotations, Modifiers modifiers) {

            // Shared tail for class/struct/interface definitions:
            //   <keyword> Identifier GenericParameterList? BaseTypeList? { MemberDefinitionList? }

            target.docComment = docComment;
            target.annotations = annotations;
            target.modifiers = modifiers;

            lexer.read(); // consume class | struct | interface
            target.name = expectIdentifier();

            // Optional generic parameter list:  < ... >
            if (readToken(TokenType.lt)) {
                target.genericParameters = parseGenericParameterList();
            }

            // Optional base type list:  : Type, Type, ...
            if (readToken(TokenType.colon)) {
                target.baseTypes = parseBaseTypeList();
            }

            // Member body:  { ... }
            if (expectLBrace()) {
                parseMemberContainerDefinitionBody(target, true);
            }
        }

        private ExtensionDefinition parseExtensionDefinition(DocComment docComment, MemberDefinition.MemberAnnotation annotations, Modifiers modifiers) {

            // ExtensionDefinition :
            //   Annotations? Modifiers? extend Type BaseTypeList? { MemberDefinitionList? }

            var definition = new ExtensionDefinition {
                docComment = docComment,
                annotations = annotations,
                modifiers = modifiers
            };

            // The `extend` keyword itself becomes the member name; the type
            // being extended follows it.
            definition.name = parseIdentifier(); // extend
            definition.targetType = parseType();

            // Optional base type list:  : Type, Type, ...
            if (readToken(TokenType.colon)) {
                definition.baseTypes = parseBaseTypeList();
            }

            if (expectLBrace()) {
                parseMemberContainerDefinitionBody(definition, true);
            }

            return definition;
        }

        private EnumDefinition parseEnumDefinition(DocComment docComment, MemberDefinition.MemberAnnotation annotations, Modifiers modifiers) {

            // EnumDefinition :
            //   Annotations? Modifiers? enum Identifier EnumBaseType? { EnumFieldDefinitionList? }

            // EnumBaseType :
            //   : Type

            // EnumFieldDefinitionList :
            //   EnumFieldDefinition 
            //   EnumFieldDefinitionList , EnumFieldDefinition

            // EnumFieldDefinition :
            //   Identifier
            //   Identifier = Expression

            var result = new EnumDefinition();
            result.docComment = docComment;
            result.annotations = annotations;
            result.modifiers = modifiers;
            lexer.read(); // enum
            result.name = expectIdentifier();

            // Optional underlying type:  : Type
            if (readToken(TokenType.colon)) {
                result.baseTypes = parseBaseTypeList();
            }

            if (expectLBrace()) {

                // Members form a singly-linked list rooted at result.members.
                MemberDefinition last = null;

                do {

                    // A closing brace here also accepts a trailing comma.
                    if (readToken(TokenType.rBrace)) {
                        return result;
                    }

                    var current = new EnumMemberDefinition();
                    current.docComment = parseDocComment();
                    current.annotations = parseMemberAnnotationList();
                    current.name = expectIdentifier();

                    // Optional explicit value:  = Expression
                    if (readToken(TokenType.assign)) {
                        current.initializer = parseExpression();
                    }

                    // Checking result.members (rather than `last`) is equivalent
                    // here: both are assigned together on the first member.
                    if (result.members == null) {
                        last = result.members = current;
                    } else {
                        last = last.next = current;
                    }

                } while (readToken(TokenType.comma));

                expectToken(TokenType.rBrace, ErrorCode.expectedRBrace);

            }
            return result;
        }

        private List<Expression> parseBaseTypeList() {

            // TypeList :
            //   Type
            //   TypeList , Type

            // At least one type is required; further types follow commas.
            var types = new List<Expression> { parseType() };
            while (readToken(TokenType.comma)) {
                types.Add(parseType());
                // Enforce the hard limit on the number of base types.
                if (types.Count > 250) {
                    Compiler.error(ErrorCode.tooManyBaseTypes, "基类型太多；类类型不得超过 250 个", lexer.current);
                }
            }
            return types;
        }

        #endregion

        #region 解析语句

        private void parseBlockBody(Block target) {

            // StatementList :
            //   Statement ...

            // Fills target.statements until the closing brace; an EOF is
            // reported as a missing "}" and also terminates the block.
            var body = target.statements = new List<Statement>();

            for (; ; ) {
                var tokenType = lexer.peek().type;
                if (tokenType == TokenType.rBrace) {
                    target.endLocation = lexer.read().endLocation;
                    return;
                }
                if (tokenType == TokenType.eof) {
                    expectToken(TokenType.rBrace, ErrorCode.expectedRBrace);
                    target.endLocation = lexer.current.endLocation;
                    return;
                }
                body.Add(parseStatement());
            }
        }

        /// <summary>
        /// Parses a single statement.
        /// </summary>
        /// <returns>The parsed statement, or null at end of file.</returns>
        private Statement parseStatement() {

            // Statement :
            //   VariableStatement
            //   Block
            //   EmptyStatement
            //   LabeledStatement
            //   ExpressionStatement
            //   SelectionStatement
            //   IterationStatement
            //   JumpStatement
            //   TryStatement
            //   WithStatement

            // EmptyStatement :
            //   ;

            // SelectionStatement :
            //   IfStatement
            //   SwitchStatement

            // IterationStatement :
            //   ForStatement
            //   ForInStatemeent
            //   ForToStatemeent
            //   WhileStatemeent
            //   UntilStatemeent
            //   DoWhileStatemeent
            //   DoUntilStatemeent

            // JumpStatement :
            //   ContinueStatement
            //   BreakStatement
            //   ReturnStatement
            //   YieldStatement
            //   GotoStatement
            //   ThrowStatement 

            TokenType type = lexer.peek().type;

            switch (type) {

                case TokenType.identifier:

                    // Possible shapes:
                    //   Type Identifier   (variable declaration)
                    //   Identifier:       (label)
                    //   Identifier()      (expression)

                    // Consume the leading identifier first.
                    Identifier parsedIdentifier = parseIdentifier();

                    // "Identifier :" → labeled statement.
                    if (lexer.peek().type == TokenType.colon) {
                        return parseLabeledStatement(parsedIdentifier);
                    }

                    // Otherwise treat the identifier as the start of a type and
                    // let the variable/expression disambiguation continue.
                    return parseVariableOrExpressionStatement(parseType(parsedIdentifier, TypeUsage.declartion));

                case TokenType.lBrace:
                    return parseBlock();

                case TokenType.var:
                    return parseVariableOrExpressionStatement(parsePredefinedType());

                case TokenType.@if:
                    return parseIfStatement();

                case TokenType.@switch:
                    return parseSwitchStatement();

                case TokenType.@for:
                    return parseForStatement();

                case TokenType.@while:
                    return parseWhileStatement();

                case TokenType.@do:
                    return parseDoWhileStatement();

                case TokenType.@return:
                    return parseReturnStatement();

                case TokenType.yield:
                    return parseYieldStatement();

                case TokenType.@break:
                    return parseBreakStatement();

                case TokenType.@continue:
                    return parseContinueStatement();

                case TokenType.@goto:
                    return parseGotoStatement();

                case TokenType.@throw:
                    return parseThrowStatement();

                case TokenType.@try:
                    return parseTryStatement();

                case TokenType.@with:
                    return parseWithStatement();

                case TokenType.semicolon:
                    // An empty statement is preserved as its own node.
                    return new Semicolon() { startLocation = lexer.read().startLocation };
                case TokenType.eof:
                    return null;

                case TokenType.@const:
                    return parseVariableStatement(VariableType.constLocal);

                default:

                    // Built-in type names start a declaration, same as `var`.
                    if (type.isPredefinedType()) {
                        goto case TokenType.var;
                    }

                    // General expression statements are not implemented yet:
                    // return parseExpressionStatement();
                    throw new Unreachable();
            }

        }

        /// <summary>
        /// Continues parsing a statement once a type expression has already
        /// been consumed.
        /// </summary>
        /// <param name="parsedType">The type expression parsed so far.</param>
        /// <returns>The resulting statement.</returns>
        private Statement parseVariableOrExpressionStatement(Expression parsedType) {

            // "Type Identifier ..." → variable declaration statement.
            if (lexer.peek().type != TokenType.identifier) {
                // Expression statements ("Type.A;") are not implemented yet:
                // return parseExpressionStatement(parsedType);
                throw new Unreachable();
            }

            return parseVariableStatement(parsedType, parseIdentifier());
        }

        private VariableStatement parseVariableStatement(Expression parsedType, Identifier parsedIdentifier) {

            // Builds a variable statement whose type and first identifier have
            // already been consumed; the statement spans from the type's start.
            var statement = new VariableStatement();
            statement.startLocation = parsedType.startLocation;
            statement.variables = parseVariableList(parsedType, parsedIdentifier);
            expectSemicolon();
            return statement;
        }

        private VariableStatement parseVariableStatement(VariableType variableType) {

            // VariableStatement :
            //   Type VariableList
            //   const Type? VariableList

            Debug.Assert(lexer.peek().type == TokenType.@const);

            var statement = new VariableStatement();
            statement.startLocation = lexer.read().startLocation; // const

            // After `const` the type is optional: if the expression we parsed
            // as a type is followed by an identifier it really was the type;
            // otherwise it was the variable name itself.
            var maybeType = parseType();
            statement.variables = lexer.peek().type == TokenType.identifier
                ? parseVariableList(maybeType, parseIdentifier())
                : parseVariableList(null, toIdentifier(maybeType));

            // Tag every declared variable with the requested kind.
            var variable = statement.variables;
            while (variable != null) {
                variable.variableType = variableType;
                variable = variable.next;
            }

            expectSemicolon();

            return statement;
        }

        private Variable parseVariableList(Expression type, Identifier currentIdentifier) {

            // VariableList :
            //   Variable
            //   VariableList , Variable

            // Builds the singly-linked variable chain; the first identifier
            // was already consumed by the caller.
            var head = parseVariable(type, currentIdentifier);
            var tail = head;
            while (true) {
                if (!readToken(TokenType.comma)) {
                    return head;
                }
                tail = tail.next = parseVariable(type, expectIdentifier());
            }
        }

        private Variable parseVariable(Expression type, Identifier name) {

            // Variable :
            //   name
            //   name = Expression

            var variable = new Variable {
                type = type,
                name = name
            };
            // Optional initializer.
            if (readToken(TokenType.assign)) {
                variable.initialiser = parseExpression();
            }
            variable.endLocation = lexer.current.endLocation;
            return variable;
        }

        /// <summary>
        /// Parses either a variable declaration or an expression.
        /// </summary>
        /// <returns>A variable chain or an expression node.</returns>
        private Node parseVariableOrExpression() {
            var tokenType = lexer.peek().type;

            if (tokenType == TokenType.identifier) {
                var declaredType = parseTypeExpression(parseIdentifier(), TypeUsage.declartion);

                // An identifier following the parsed type means we are looking
                // at a declaration; otherwise it was the expression itself.
                if (lexer.peek().type == TokenType.identifier) {
                    return parseVariableList(declaredType, parseIdentifier());
                }

                return parseExpression(declaredType);
            }

            if (tokenType.isPredefinedType()) {
                var predefinedType = parsePredefinedType();

                // Same disambiguation for built-in type names.
                if (lexer.peek().type == TokenType.identifier) {
                    return parseVariableList(predefinedType, parseIdentifier());
                }
                return parseExpression(predefinedType);
            }

            return parseExpression();
        }

        private Statement parseEmbeddedStatement() {

            // EmbeddedStatement :
            //   Statement except VariableStatement and LabeledStatement 

            var result = parseStatement();

            // parseStatement returns null only at EOF; variable and labeled
            // statements are legal statements but not legal *embedded* ones.
            if (result == null) {
                Compiler.error(ErrorCode.expectedStatement, "语法错误：应输入语句", lexer.peek());
            } else if (result is VariableStatement) {
                Compiler.error(ErrorCode.invalidVariableStatement, "嵌套语句不能是变量声明语句；应使用“{}”包围", ((VariableStatement)result).type);
            } else if (result is LabeledStatement) {
                Compiler.error(ErrorCode.invalidLabeledStatement, "嵌套语句不能是标签语句；应使用“{}”包围", ((LabeledStatement)result).label);
            }

            // A lone ";" immediately followed by "{" is probably a stray
            // semicolon (e.g. `if (x); { ... }`); warn about it.
            // NOTE(review): the warning location uses lexer.current, which at this
            // point appears to be the semicolon token — confirm.
            if (result is Semicolon && lexer.peek().type == TokenType.lBrace) {
                Compiler.warning(ErrorCode.confusedSemicolon, "此分号可能是多余的", lexer.current.startLocation, lexer.current.endLocation);
            }

            return result;
        }

        private Block parseBlock() {

            // Block :
            //   { StatementList? }

            Debug.Assert(lexer.peek().type == TokenType.lBrace);

            var block = new Block();
            block.startLocation = lexer.read().startLocation; // consume {
            parseBlockBody(block);
            return block;
        }

        private ExpressionStatement parseExpressionStatement() {

            // ExpressionStatement :
            //   Expression ;

            // NOTE(review): expression statements are not implemented yet; this
            // method unconditionally throws. The intended implementation was:
            //var result = new ExpressionStatement();
            //result.body = parseExpression();
            //expectSemicolon();
            //result.endLocation = lexer.current.endLocation;
            //return result;

            throw new Unreachable();

        }

        private ExpressionStatement parseExpressionStatement(Expression parsed) {

            // ExpressionStatement :
            //   Expression ;

            // NOTE(review): the overload continuing from an already-parsed
            // expression is also unimplemented; the intended implementation was:
            //var result = new ExpressionStatement();
            //result.body = parseExpression(parsed);
            //expectSemicolon();
            //result.endLocation = lexer.current.endLocation;
            //return result;

            throw new Unreachable();

        }

        private IfStatement parseIfStatement() {

            // IfStatement :
            //   if Condition EmbeddedStatement
            //   if Condition EmbeddedStatement else EmbeddedStatement

            Debug.Assert(lexer.peek().type == TokenType.@if);

            var statement = new IfStatement();
            statement.startLocation = lexer.read().startLocation; // consume if
            statement.condition = parseCondition();
            statement.thenClause = parseEmbeddedStatement();

            // Optional else branch.
            if (readToken(TokenType.@else)) {
                statement.elseClause = parseEmbeddedStatement();
            }

            return statement;
        }

        private SwitchStatement parseSwitchStatement() {

            // SwitchStatement :
            //   switch Condition? { CaseClauseList }

            // CaseClauseList :
            //   CaseClause
            //   CaseClauseList CaseClause

            // CaseClause :
            //   case UnaryExpression : StatementList?
            //   case UnaryExpression, UnaryExpression : StatementList?
            //   case else : StatementList?

            Debug.Assert(lexer.peek().type == TokenType.@switch);

            var result = new SwitchStatement();
            result.startLocation = lexer.read().startLocation; // switch

            // Parentheses around the condition are optional unless the
            // compiler options require them.
            if (!Compiler.options.disallowMissingParentheses && lexer.peek().type != TokenType.lParam) {
                result.condition = parseExpression();
            } else {
                expectToken(TokenType.lParam, ErrorCode.expectedLParam);
                result.condition = parseExpression();
                expectToken(TokenType.rParam, ErrorCode.expectedRParam);
            }

            expectLBrace();

            SwitchStatement.CaseClause last = null;

        parseNextCase:

            switch (lexer.peek().type) {

                case TokenType.@case:

                    var current = new SwitchStatement.CaseClause();
                    // The token consumed here is either `case` or — when this
                    // case is re-entered via the comma path below — the `,`.
                    if (lexer.read().type == TokenType.comma) {
                        current.startLocation = lexer.peek().startLocation; // after the ,
                    } else {
                        current.startLocation = lexer.current.startLocation; // the case keyword
                    }

                    // `case else` is the default clause; its label stays null.
                    if (!readToken(TokenType.@else)) {
                        current.label = parseExpression();
                    }

                    current.endLocation = lexer.current.endLocation;

                    // Append to the singly-linked clause list.
                    if (last == null) {
                        last = result.caseClauses = current;
                    } else {
                        last = last.next = current;
                    }

                    // A following comma splits off another clause that shares
                    // this position ("case a, b:").
                    if (lexer.peek().type == TokenType.comma) {
                        goto case TokenType.@case;
                    }

                    expectToken(TokenType.colon, ErrorCode.expectedColon);

                    // Collect this clause's statements until the next `case`,
                    // the closing brace, or EOF.
                    current.body = new Block();
                    var statements = current.body.statements = new List<Statement>();

                    while (true) {
                        switch (lexer.peek().type) {
                            case TokenType.rBrace:
                            case TokenType.@case:
                                current.body.endLocation = lexer.current.endLocation;
                                goto parseNextCase;
                            case TokenType.eof:
                                goto case TokenType.rBrace;
                            default:
                                statements.Add(parseStatement());
                                continue;
                        }
                    }

                case TokenType.rBrace:
                    lexer.read();
                    break;

                default:
                    Compiler.error(ErrorCode.expectedCase, "语法错误：应输入“case”或“}”", lexer.peek());
                    break;
            }

            return result;
        }

        private Statement parseForStatement() {

            // ForStatement :
            //   for ( VariableOrExpression? ; Expression? ; Expression? ) EmbeddedStatement
            //   for ( Type Identifier in Expression ) EmbeddedStatement
            //   for ( Type Identifier = Expression to Expression ) EmbeddedStatement

            // VariableOrExpression :
            //   Type VariableList
            //   Expression

            Debug.Assert(lexer.peek().type == TokenType.@for);

            var start = lexer.read().startLocation; // for

            // The opening parenthesis is optional unless strict mode demands it.
            var hasParens = readToken(TokenType.lParam);
            if (!hasParens && Compiler.options.disallowMissingParentheses) {
                Compiler.error(ErrorCode.strictExpectedParentheses, "严格模式: 应输入“(”", lexer.current);
            }

            // No header expression at all: a plain for(;;)-style statement.
            if (!followsWithExpression()) {
                return parseForStatement(start, hasParens, null);
            }

            var header = parseVariableOrExpression();

            // Only a variable declaration can open the "for in" / "for to" forms.
            var headerVariable = header as Variable;
            if (headerVariable != null) {
                if (checkIdentifier(lexer.peek(), "in")) {
                    return parseForInStatement(start, hasParens, headerVariable);
                }
                if (checkIdentifier(lexer.peek(), "to")) {
                    return parseForToStatement(start, hasParens, headerVariable);
                }
            }

            return parseForStatement(start, hasParens, header);
        }

        // Parses the classic three-part "for" body once the initializer (possibly null)
        // has been consumed; hasParam tells whether an opening "(" was seen.
        private ForStatement parseForStatement(Location startLocation, bool hasParam, Node initializer) {
            var result = new ForStatement() {
                startLocation = startLocation,
                initializer = initializer
            };

            // First ";", then the optional loop condition.
            expectToken(TokenType.semicolon, ErrorCode.expectedSemicolon);
            if (lexer.peek().type != TokenType.semicolon) {
                result.condition = parseExpression();
            }

            // Second ";", then an optional comma-separated iterator list folded
            // left-to-right into nested CommaExpression nodes.
            expectToken(TokenType.semicolon, ErrorCode.expectedSemicolon);
            if (followsWithExpression()) {
                var iterator = parseExpression();
                while (readToken(TokenType.comma)) {
                    iterator = new CommaExpression() {
                        left = iterator,
                        right = parseExpression()
                    };
                }
                result.iterator = iterator;
            }

            if (hasParam) {
                expectToken(TokenType.rParam, ErrorCode.expectedRParam);
            }

            result.body = parseEmbeddedStatement();
            return result;
        }

        // Parses the "for Type Identifier = Expression to Expression" form.
        // The variable has already been consumed; "to" is the next token.
        private ForToStatement parseForToStatement(Location startLocation, bool hasParam, Variable variable) {

            // "for to" requires exactly one variable, and it must carry an initial value.
            if (variable.initialiser == null) {
                Compiler.error(ErrorCode.expectedForToInitialiser, "语法错误：“for to”语句中变量缺少初始值", variable.name);
            }
            if (variable.next != null) {
                Compiler.error(ErrorCode.invalidForToInitialiser, "语法错误：“for to”语句中最多只能有一个变量", variable.next);
            }

            var result = new ForToStatement() {
                startLocation = startLocation,
                variable = variable
            };

            lexer.read(); // to
            result.end = parseExpression();

            // An optional iterator expression may follow a semicolon.
            if (readToken(TokenType.semicolon) && followsWithExpression()) {
                result.iterator = parseExpression();
            }

            if (hasParam) {
                expectToken(TokenType.rParam, ErrorCode.expectedRParam);
            }

            result.body = parseEmbeddedStatement();
            return result;
        }

        // Parses the "for Type Identifier in Expression" form.
        // The variable has already been consumed; "in" is the next token.
        private ForInStatement parseForInStatement(Location startLocation, bool hasParam, Variable variable) {

            // "for in" requires exactly one variable, declared without an initial value.
            if (variable.initialiser != null) {
                Compiler.error(ErrorCode.unexpectedForInInitialiser, "语法错误：“for in”语句中变量不允许有初始值", variable.initialiser);
            }
            if (variable.next != null) {
                Compiler.error(ErrorCode.invalidForInInitialiser, "语法错误：“for in”语句中最多只能有一个变量", variable.next);
            }

            var result = new ForInStatement() {
                startLocation = startLocation,
                variable = variable
            };

            lexer.read(); // in
            result.iterator = parseExpression();

            if (hasParam) {
                expectToken(TokenType.rParam, ErrorCode.expectedRParam);
            }

            result.body = parseEmbeddedStatement();
            return result;
        }

        private WhileStatement parseWhileStatement() {

            // WhileStatement :
            //   while Condition EmbeddedStatement ;

            Debug.Assert(lexer.peek().type == TokenType.@while);

            var result = new WhileStatement();
            result.startLocation = lexer.read().startLocation; // while
            result.condition = parseCondition();
            result.body = parseEmbeddedStatement();
            return result;
        }

        private DoWhileStatement parseDoWhileStatement() {

            // DoWhileStatement :
            //   do EmbeddedStatement while Condition ;

            Debug.Assert(lexer.peek().type == TokenType.@do);

            var startLocation = lexer.read().startLocation; // do
            var body = parseEmbeddedStatement();

            // The "while" keyword must follow the loop body.
            expectToken(TokenType.@while, ErrorCode.expectedWhile);

            var result = new DoWhileStatement() {
                startLocation = startLocation,
                body = body,
                condition = parseCondition()
            };

            expectSemicolon();
            return result;
        }

        private ThrowStatement parseThrowStatement() {

            // ThrowStatement :
            //   throw Expression? ;

            Debug.Assert(lexer.peek().type == TokenType.@throw);

            var result = new ThrowStatement() {
                startLocation = lexer.read().startLocation // throw
            };

            // The thrown value is optional.
            if (followsWithExpression()) {
                result.value = parseExpression();
            }

            expectSemicolon();
            result.endLocation = lexer.current.endLocation;
            return result;
        }

        private YieldStatement parseYieldStatement() {

            // YieldStatement :
            //   yield Expression ;

            Debug.Assert(lexer.peek().type == TokenType.@yield);

            var startLocation = lexer.read().startLocation; // yield
            var value = parseExpression();
            expectSemicolon();

            return new YieldStatement() {
                startLocation = startLocation,
                value = value,
                endLocation = lexer.current.endLocation
            };
        }

        private Statement parseGotoStatement() {

            // GotoStatement :
            //   goto Identifier ;
            //   goto case Expression ;
            //   goto case else ;

            Debug.Assert(lexer.peek().type == TokenType.@goto);

            var startLocation = lexer.read().startLocation; // goto

            Statement result;

            switch (lexer.peek().type) {
                case TokenType.identifier:
                    // startLocation is assigned once in the shared epilogue below
                    // (the original also set it here, redundantly).
                    result = new GotoLabelStatement() {
                        target = parseIdentifier()
                    };
                    break;
                case TokenType.@case:
                    lexer.read(); // case
                    result = new GotoCaseStatement() {
                        // A null target marks the "goto case else" form.
                        target = readToken(TokenType.@else) ? null : parseExpression()
                    };
                    break;
                default:
                    // Neither a label nor "case": report the missing identifier.
                    expectIdentifier();
                    return null;
            }

            // Shared epilogue: location bookkeeping and the trailing semicolon.
            result.startLocation = startLocation;
            expectSemicolon();
            result.endLocation = lexer.current.endLocation;
            return result;
        }

        private BreakStatement parseBreakStatement() {

            // BreakStatement :
            //   break ;

            Debug.Assert(lexer.peek().type == TokenType.@break);

            var startLocation = lexer.read().startLocation; // break
            expectSemicolon();

            return new BreakStatement() {
                startLocation = startLocation,
                endLocation = lexer.current.endLocation
            };
        }

        private ContinueStatement parseContinueStatement() {

            // ContinueStatement :
            //   continue ;

            Debug.Assert(lexer.peek().type == TokenType.@continue);

            var startLocation = lexer.read().startLocation; // continue
            expectSemicolon();

            return new ContinueStatement() {
                startLocation = startLocation,
                endLocation = lexer.current.endLocation
            };
        }

        private ReturnStatement parseReturnStatement() {

            // ReturnStatement :
            //   return Expression? ;

            Debug.Assert(lexer.peek().type == TokenType.@return);

            var result = new ReturnStatement() {
                startLocation = lexer.read().startLocation // return
            };

            // The return value is optional.
            if (followsWithExpression()) {
                result.value = parseExpression();
            }

            expectSemicolon();
            result.endLocation = lexer.current.endLocation;
            return result;
        }

        // Parses "Identifier : Statement"; the label has already been consumed
        // by the caller and the colon is the next token.
        private LabeledStatement parseLabeledStatement(Identifier label) {

            // LabeledStatement :
            //   Identifier : Statement

            Debug.Assert(lexer.peek().type == TokenType.colon);

            lexer.read(); // skip the colon

            var result = new LabeledStatement();
            result.label = label;
            result.body = parseStatement();
            return result;
        }

        private TryStatement parseTryStatement() {

            // TryStatement :
            //   try EmbeddedStatement CatchClauseList
            //   try EmbeddedStatement CatchClauseList? finally EmbeddedStatement

            // CatchClauseList :
            //   CatchClause ...

            // CatchClause :
            //   catch EmbeddedStatement
            //   catch ( Type ) EmbeddedStatement
            //   catch ( Type Identifier ) EmbeddedStatement

            Debug.Assert(lexer.peek().type == TokenType.@try);

            var result = new TryStatement();
            result.startLocation = lexer.read().startLocation; // try
            result.tryClause = parseEmbeddedStatement();

            // Collect catch clauses into a singly-linked list.
            TryStatement.CatchClause last = null;
            while (readToken(TokenType.@catch)) {
                var clause = new TryStatement.CatchClause();
                clause.startLocation = lexer.current.startLocation;

                // Optional "( Type )" or "( Type Identifier )" filter.
                if (readToken(TokenType.lParam)) {
                    clause.variable = new Variable();
                    clause.variable.type = parseType();
                    if (!readToken(TokenType.rParam)) {
                        clause.variable.name = expectIdentifier();
                        expectToken(TokenType.rParam, ErrorCode.expectedRParam);
                    }
                }

                clause.body = parseEmbeddedStatement();

                if (last == null) {
                    last = result.catchClauses = clause;
                } else {
                    last = last.next = clause;
                }
            }

            // An optional finally clause closes the statement.
            if (readToken(TokenType.@finally)) {
                result.finallyClause = parseEmbeddedStatement();
            }

            return result;
        }

        private WithStatement parseWithStatement() {

            // WithStatement :
            //   with EmabedVariableDeclaration EmbeddedStatement

            Debug.Assert(lexer.peek().type == TokenType.@with);

            var result = new WithStatement();
            result.startLocation = lexer.read().startLocation; // with

            // Parentheses around the target are optional unless strict mode demands them.
            var hasParens = readToken(TokenType.lParam);
            if (!hasParens && Compiler.options.disallowMissingParentheses) {
                Compiler.error(ErrorCode.strictExpectedParentheses, "严格模式: 应输入“(”", lexer.current);
            }

            result.target = parseVariableOrExpression();

            if (hasParens) {
                expectToken(TokenType.rParam, ErrorCode.expectedRParam);
            }

            result.body = parseEmbeddedStatement();
            return result;
        }

        #endregion

        #region 解析表达式

        /// <summary>
        /// Parses an expression.
        /// </summary>
        /// <param name="minPrecedence">The minimum operator precedence accepted by this parse.</param>
        /// <returns>The parsed expression, or <c>Expression.empty</c> after a syntax error.</returns>
        private Expression parseExpression(int minPrecedence = 0) {

            // Expression :
            //   UnaryExpression

            // UnaryExpression :
            //   PostfixExpression
            //   ++ UnaryExpression
            //   -- UnaryExpression
            //   + UnaryExpression
            //   - UnaryExpression
            //   ! UnaryExpression
            //   await UnaryExpression
            //   async UnaryExpression
            //   new Type FuncCallArguments? NewInitilizer?
            //   typeof Type
            //   sizeof Type
            //   -> Expression

            TokenType type = lexer.peek().type;
            Expression parsed;
            switch (type) {

                // Identifier, Identifier<T>, Identifier[], Identifier*
                case TokenType.identifier:
                    parsed = parseTypeExpression(parseIdentifier(), TypeUsage.expression);
                    break;

                // (Expr)
                case TokenType.lParam:
                    parsed = parseParenthesizedExpression();
                    break;

                // new Expr
                case TokenType.@new:
                    parsed = parseNewExpression();
                    break;

                // ""
                case TokenType.stringLiteral:
                    parsed = new StringLiteral() {
                        startLocation = lexer.read().startLocation,
                        value = lexer.current.buffer.ToString(),
                        endLocation = lexer.current.endLocation
                    };
                    break;

                // 0
                case TokenType.intLiteral:
                    parsed = parseIntOrLongLiteral(Lexer.parseLongToken(lexer.peek()));
                    break;

                // 0x0
                case TokenType.hexIntLiteral:
                    parsed = parseIntOrLongLiteral(Lexer.parseHexIntToken(lexer.peek()));
                    break;

                // .0
                case TokenType.floatLiteral:
                    parsed = new FloatLiteral() {
                        startLocation = lexer.read().startLocation,
                        value = Lexer.parseFloatToken(lexer.current),
                        endLocation = lexer.current.endLocation
                    };
                    break;

                // [Expr, ...]
                case TokenType.lBrack:
                    parsed = parseListOrDictLiteral(TokenType.rBrack, ErrorCode.expectedRBrack);
                    break;

                // {key: Expr, ...}
                // FIX: the closing token here is "}", so the matching error code is
                // expectedRBrace; the original passed expectedRBrack, a copy-paste
                // slip from the "[" case above.
                case TokenType.lBrace:
                    parsed = parseListOrDictLiteral(TokenType.rBrace, ErrorCode.expectedRBrace);
                    break;

                // @ Identifier
                case TokenType.at:
                    parsed = parseMagicVariable();
                    break;

                case TokenType.@null:
                    parsed = new NullLiteral() {
                        startLocation = lexer.read().startLocation
                    };
                    break;

                case TokenType.@true:
                    parsed = new TrueLiteral() {
                        startLocation = lexer.read().startLocation
                    };
                    break;

                case TokenType.@false:
                    parsed = new FalseLiteral() {
                        startLocation = lexer.read().startLocation
                    };
                    break;

                case TokenType.@this:
                    parsed = new ThisLiteral() {
                        startLocation = lexer.read().startLocation
                    };
                    break;

                case TokenType.@base:
                    parsed = new BaseLiteral() {
                        startLocation = lexer.read().startLocation
                    };
                    break;

                // ++Expr, --Expr
                case TokenType.inc:
                case TokenType.dec:
                    parsed = new MutatorExpression {
                        prefix = true,
                        startLocation = lexer.read().startLocation,
                        @operator = type,
                        operand = parseExpression(type.getPrecedence()),
                    };
                    break;

                // -> Expr
                case TokenType.lambda:
                    parsed = parseLambdaLiteral(null);
                    break;

                case TokenType.@typeof:
                    parsed = new TypeOfExpression() {
                        startLocation = lexer.read().startLocation,
                        operand = parseExpression(type.getPrecedence()),
                    };
                    break;

                case TokenType.@sizeof:
                    parsed = new SizeOfExpression() {
                        startLocation = lexer.read().startLocation,
                        operand = parseExpression(type.getPrecedence()),
                    };
                    break;

                default:

                    // +Expr and other unary operators.
                    if (type.isUnaryOperator()) {
                        parsed = new UnaryExpression() {
                            startLocation = lexer.read().startLocation,
                            operand = parseExpression(type.getPrecedence()),
                            @operator = type
                        };
                        break;
                    }

                    // Predefined types such as int used in expression position.
                    if (type.isPredefinedType()) {
                        parsed = parsePredefinedType();
                        break;
                    }

                    #region Error recovery

                    if (type.isUsedInGlobal()) {
                        Compiler.error(ErrorCode.invalidExpression, "不能在函数主体内嵌其它成员定义", lexer.peek());
                        skipToNextLine();
                    } else if (type == TokenType.rParam) {
                        Compiler.error(ErrorCode.unexpectedRParam, "语法错误：多余的“)”", lexer.read());
                    } else if (type == TokenType.rBrack) {
                        Compiler.error(ErrorCode.unexpectedRBrack, "语法错误：多余的“]”", lexer.read());
                    } else if (type == TokenType.rBrace) {
                        Compiler.error(ErrorCode.unexpectedRBrace, "语法错误：多余的“}”", lexer.read());
                    } else if (type.isStatementStart()) {
                        Compiler.error(ErrorCode.invalidExpression, String.Format("语法错误：“{0}”只能出现在每行语句的最前面位置", lexer.peek().ToString()), lexer.peek());
                        // Leave the token unconsumed so statement parsing can resume on it.
                    } else {
                        Compiler.error(ErrorCode.invalidExpression, String.Format("语法错误：无效的表达式项“{0}”", lexer.peek().ToString()), lexer.peek());
                        skipToNextLine();
                    }

                    return Expression.empty;

                    #endregion

            }

            // Continue with any postfix/binary operators that follow the primary.
            return parseExpression(parsed, minPrecedence);

        }

        /// <summary>
        /// Continues parsing the trailing postfix/binary operators after one
        /// expression has already been parsed.
        /// </summary>
        /// <param name="parsed">The already-parsed expression.</param>
        /// <param name="minPrecedence">The minimum operator precedence accepted by this parse.</param>
        /// <returns>The combined expression.</returns>
        private Expression parseExpression(Expression parsed, int minPrecedence = 0) {

            // PostfixExpression :
            //   MemberExpression
            //   MemberExpression [no LineTerminator here] ++
            //   MemberExpression [no LineTerminator here] --

            // MemberExpression :
            //   CallExpression
            //   PrimaryExpression
            //   LambdaLiteral
            //   MemberExpression [ Expression ]
            //   MemberExpression . Identifier
            //   MemberExpression .. Identifier

            TokenType type;
            int precedence;

            // Precedence-climbing loop: keep absorbing operators whose precedence
            // is at least minPrecedence, folding the result into "parsed".
            while ((precedence = (type = lexer.peek().type).getPrecedence()) >= minPrecedence) {

                // Expr = Val — assignments recurse with the SAME precedence
                // (right-associative), unlike the binary case below.
                if (type.isAssignOperator()) {
                    lexer.read();
                    parsed = new BinaryExpression() {
                        leftOperand = parsed,
                        @operator = type,
                        rightOperand = parseExpression(precedence)
                    };
                    continue;
                }

                switch (type) {

                    // Expr.call
                    case TokenType.period: {
                            var current = new MemberCallExpression();
                            current.target = parsed;
                            lexer.read();
                            current.argument = parseGenericTypeExpression(expectIdentifier(), TypeUsage.expression);
                            parsed = current;
                            continue;
                        }

                    // Expr()
                    case TokenType.lParam: {
                            var current = new FuncCallExpression();
                            current.target = parsed;
                            current.arguments = parseArgumentList(TokenType.rParam, ErrorCode.expectedRParam);
                            current.endLocation = lexer.current.endLocation;
                            parsed = current;
                            continue;
                        }

                    // Expr -> ... : the parsed expression becomes the lambda parameter.
                    case TokenType.lambda:
                        parsed = parseLambdaLiteral(toIdentifier(parsed));
                        continue;

                    // Expr[]
                    case TokenType.lBrack: {
                            var current = new IndexCallExpression();
                            current.target = parsed;
                            current.arguments = parseArgumentList(TokenType.rBrack, ErrorCode.expectedRBrack);
                            current.endLocation = lexer.current.endLocation;
                            parsed = current;
                            continue;
                        }

                    // Expr ? A : B
                    case TokenType.conditional: {
                            var current = new ConditionalExpression();
                            current.condition = parsed;
                            lexer.read();
                            current.thenExpression = parseExpression();
                            expectToken(TokenType.colon, ErrorCode.expectedColon);
                            current.elseExpression = parseExpression();
                            parsed = current;
                            continue;
                        }

                    // Expr++, Expr--
                    case TokenType.inc:
                    case TokenType.dec:
                        // If ++ or -- starts on a new line, stop: it belongs to the
                        // next statement, not to this expression.
                        if (lexer.peek().hasLineTerminatorBeforeStart) {
                            return parsed;
                        }
                        parsed = new MutatorExpression {
                            operand = parsed,
                            @operator = type,
                            endLocation = lexer.read().endLocation
                        };
                        continue;

                    // Expr..A
                    case TokenType.periodChain: {
                            var current = new ChainCallExpression();
                            current.target = parsed;
                            lexer.read(); // ..
                            current.argument = expectIdentifier();
                            parsed = new ChainExpression() {
                                chainCallExpression = current,
                              //  body = parseExpression(current, precedence + 1)
                            };
                            continue;
                        }

                    case TokenType.@is:
                        lexer.read();
                        parsed = new IsExpression() {
                            leftOperand = parsed,
                            rightOperand = parseExpression(precedence + 1)
                        };
                        continue;

                    case TokenType.@as:
                        lexer.read();
                        parsed = new AsExpression() {
                            leftOperand = parsed,
                            rightOperand = parseExpression(precedence + 1)
                        };
                        continue;

                    // Expr .. Expr range literal.
                    case TokenType.rangeTo: {
                            var current = new RangeLiteral();
                            current.start = parsed;
                            lexer.read();
                            current.end = parseExpression(precedence + 1);
                            parsed = current;
                            continue;
                        }

                    default:

                        // Expr + Val — left-associative binary operators recurse
                        // with precedence + 1.
                        if (type.isBinaryOperator()) {
                            lexer.read();
                            parsed = new BinaryExpression() {
                                leftOperand = parsed,
                                @operator = type,
                                rightOperand = parseExpression(precedence + 1)
                            };
                            continue;
                        }

                        // Not an operator we handle: the expression ends here.
                        return parsed;
                }
            }

            return parsed;
        }

        /// <summary>
        /// Parses an identifier from the current token.
        /// </summary>
        /// <returns>The parsed identifier node.</returns>
        private Identifier parseIdentifier() {
            var result = new Identifier();
            result.startLocation = lexer.read().startLocation;
            result.value = lexer.current.buffer.ToString();
            result.endLocation = lexer.current.endLocation;
            return result;
        }

        /// <summary>
        /// Parses a magic variable ("@" followed by an identifier).
        /// </summary>
        /// <returns>The parsed magic variable node.</returns>
        private MagicVariable parseMagicVariable() {

            // MagicVariable :
            //   @ Identifier

            Debug.Assert(lexer.peek().type == TokenType.@at);

            return new MagicVariable() {
                startLocation = lexer.read().startLocation, // @
                value = expectIdentifier().value,
                endLocation = lexer.current.endLocation
            };
        }

        // Parses a loop/branch condition, with or without surrounding parentheses.
        private Expression parseCondition() {

            // Condition :
            //   ( BooleanExpression )

            // With parentheses: parse the inner expression and require ")".
            if (readToken(TokenType.lParam)) {
                var result = parseExpression(0);
                expectToken(TokenType.rParam, ErrorCode.expectedRParam);
                return result;
            }

            // Without parentheses: only an error in strict mode.
            if (Compiler.options.disallowMissingParentheses) {
                Compiler.error(ErrorCode.strictExpectedParentheses, "严格模式: 应输入“(”", lexer.current);
            }
            return parseExpression();
        }

        // Wraps an already-parsed numeric value in the narrowest literal node:
        // IntLiteral when the value fits in an int, LongLiteral otherwise.
        private Expression parseIntOrLongLiteral(long value) {
            if (value > int.MaxValue) {
                return new LongLiteral() {
                    startLocation = lexer.read().startLocation,
                    value = value,
                    endLocation = lexer.current.endLocation
                };
            }

            return new IntLiteral() {
                startLocation = lexer.read().startLocation,
                value = (int)value,
                endLocation = lexer.current.endLocation
            };
        }

        // Parses a list or dict literal. stopBrack selects the bracket family
        // (rBrack for "[...]", rBrace for "{...}"); whether the literal is a
        // list or a dict is decided by whether a ":" follows the first element.
        private Expression parseListOrDictLiteral(TokenType stopBrack, ErrorCode errorCode) {

            // ListLiteral :
            //   [ ElementList? ]
            //   [ ElementList? , ]

            // ElementList :
            //   Expression
            //   ElementList , Expression

            // DictLiteral :
            //   { PropertyList? }

            // PropertyList :
            //   Property
            //   PropertyNameAndValueList , Property

            // Property :
            //   PropertyName : Expression

            Debug.Assert(lexer.peek().type == (stopBrack == TokenType.rBrace ? TokenType.@lBrace : TokenType.lBrack));

            var startLocation = lexer.read().startLocation; // [ or {
            var type = lexer.current.type;

            // Empty dict literals: [:], {:}
            if (readToken(TokenType.colon)) {
                expectToken(stopBrack, errorCode);
                return new DictLiteral() {
                    startLocation = startLocation,
                    type = type,
                    endLocation = lexer.current.endLocation
                };
            }

            // Empty list literals: [], {}
            if (readToken(stopBrack)) {
                return new ListLiteral() {
                    startLocation = startLocation,
                    type = type,
                    endLocation = lexer.current.endLocation
                };
            }

            var firstKey = parseExpression();

            // A ":" after the first element means a dict: [key: value], {key: value}
            if (readToken(TokenType.colon)) {
                var result = new DictLiteral();
                result.startLocation = startLocation;
                result.type = type;

                // Properties form a linked list; "{" literals require identifier keys,
                // so the first expression is coerced via toIdentifier.
                var last = result.properties = new DictLiteral.Property() {
                    key = type == TokenType.lBrace ? toIdentifier(firstKey) : firstKey,
                    value = parseExpression()
                };

                while (readToken(TokenType.comma)) {

                    // Trailing comma immediately followed by ], }
                    if (readToken(stopBrack)) {
                        goto end;
                    }

                    var current = new DictLiteral.Property();

                    if (type == TokenType.lBrace) {
                        current.key = expectIdentifier();
                    } else {
                        current.key = parseExpression();
                    }

                    expectToken(TokenType.colon, ErrorCode.expectedColon);
                    current.value = parseExpression();

                    last = last.next = current;

                }

                expectToken(stopBrack, errorCode);
            end:
                result.endLocation = lexer.current.endLocation;
                return result;

            } else {

                var result = new ListLiteral();
                result.startLocation = startLocation;
                result.type = type;
                result.values = new List<Expression>() { firstKey };

                while (readToken(TokenType.comma)) {

                    // Trailing comma immediately followed by ], }
                    if (readToken(stopBrack)) {
                        goto end;
                    }

                    result.values.Add(parseExpression());

                }

                expectToken(stopBrack, errorCode);
            end:
                result.endLocation = lexer.current.endLocation;
                return result;

            }

        }

        /// <summary>
        /// Parses a call argument list up to <paramref name="stopBrack"/>,
        /// supporting named arguments (`name: value`) and a trailing comma.
        /// </summary>
        /// <param name="stopBrack">The expected closing token (`)` or `]`).</param>
        /// <param name="errorCode">The error reported when the closing token is missing.</param>
        /// <returns>The head of the singly-linked argument list, or null when empty.</returns>
        private FuncCallExpression.Argument parseArgumentList(TokenType stopBrack, ErrorCode errorCode) {

            // CallExpression :
            //   MemberExpression ( ArgumentList? )

            // ArgumentList :
            //   Argument
            //   ArgumentList , Argument

            // Argument :
            //   ArgumentValue
            //   Identifier : ArgumentValue

            // ArgumentValue :
            //   ToExpression
            //   ref ToExpression
            //   out ToExpression

            Debug.Assert(lexer.peek().type == (stopBrack == TokenType.rParam ? TokenType.@lParam : TokenType.lBrack));

            lexer.read(); // consume the opening ( or [

            FuncCallExpression.Argument head = null;
            FuncCallExpression.Argument tail = null;

            do {

                // Closing token right away: empty list or a trailing comma.
                if (readToken(stopBrack)) {
                    return head;
                }

                var argument = new FuncCallExpression.Argument();

                if (lexer.peek().type == TokenType.identifier) {
                    // Either a named argument (`name: value`) or a plain
                    // expression that merely starts with an identifier.
                    var name = parseIdentifier();

                    if (readToken(TokenType.colon)) {
                        argument.name = name;
                        parseArgumentBody(argument);
                    } else {
                        argument.value = parseExpression(parseTypeExpression(name, TypeUsage.expression));
                    }

                } else {
                    parseArgumentBody(argument);
                }

                // Append to the singly-linked argument list.
                if (tail == null) {
                    head = argument;
                    tail = argument;
                } else {
                    tail.next = argument;
                    tail = argument;
                }

            } while (readToken(TokenType.comma));

            expectToken(stopBrack, errorCode);
            return head;

        }

        /// <summary>
        /// Parses the value part of a call argument, including an optional
        /// passing-mode prefix (`out`, `out =`, or `ref`).
        /// </summary>
        /// <param name="target">The argument node to fill in.</param>
        private void parseArgumentBody(FuncCallExpression.Argument target) {

            if (readToken(TokenType.@out)) {
                // `out =` assigns the result back to the argument expression;
                // a bare `out` is output-only.
                if (readToken(TokenType.assignTo)) {
                    target.type = FuncCallExpression.ArgumentType.outAssignTo;
                } else {
                    target.type = FuncCallExpression.ArgumentType.@out;
                }
            } else if (readToken(TokenType.@ref)) {
                target.type = FuncCallExpression.ArgumentType.@ref;
            }

            // The argument value itself.
            target.value = parseExpression();

        }

        /// <summary>
        /// Parses a <c>new</c> expression.
        /// </summary>
        /// <remarks>
        /// NOTE(review): this is work-in-progress — the intended implementation
        /// is the commented-out code below. As written, the method asserts on
        /// the <c>new</c> token but never consumes it and returns an empty
        /// node; confirm callers can cope before relying on this path.
        /// </remarks>
        private NewExpression parseNewExpression() {

            // NewExpression :
            //   new FuncCallExpression NewInitilizer?

            // NewInitilizer :
            //   ArrayLiteral
            //   ObjectLiteral

            Debug.Assert(lexer.peek().type == TokenType.@new);

            var result = new NewExpression();
            //result.startLocation = lexer.read().startLocation; // new
            //result.target = parseType(TypeUsage.@new);

            //switch (lexer.peek().type) {
            //    case TokenType.lParam:
            //        result.type = TokenType.lParam;
            //        result.arguments = parseArgumentList(TokenType.rParam, ErrorCode.expectedRParam);
            //        break;
            //    case TokenType.lBrack:
            //        result.type = TokenType.lBrack;
            //        result.arguments = parseArgumentList(TokenType.rBrack, ErrorCode.expectedRBrack);
            //        break;
            //}

            //if (lexer.peek().type == TokenType.lBrace) {
            //    result.initializer = parseListOrDictLiteral(TokenType.rBrace, ErrorCode.expectedRBrace);
            //}

            return result;
        }

        /// <summary>
        /// Parses an expression that starts with `(` — a parenthesized
        /// expression, a type cast, or a lambda parameter list.
        /// </summary>
        /// <remarks>
        /// NOTE(review): this is work-in-progress — the intended dispatch via
        /// <see cref="followsWithLambdaOrTypeConversion"/> is commented out and
        /// the method currently always throws <c>Unreachable</c>.
        /// </remarks>
        private Expression parseParenthesizedExpression() {

            // ParenthesizedExpression:
            //   ( Expression )

            Debug.Assert(lexer.peek().type == TokenType.lParam);

            //switch (followsWithLambdaOrTypeConversion()) {

            //    // (Parameters) ->
            //    case State.on:
            //        return parseLambdaLiteral(null);

            //    // (Type) Expression
            //    case State.off: {
            //            var result = new CastExpression();
            //            result.startLocation = lexer.read().startLocation; // (
            //            result.targetType = parseType();
            //            expectToken(TokenType.rParam, ErrorCode.expectedRParam);
            //            result.body = parseExpression(TokenType.lParam.getPrecedence());
            //            return result;
            //        }

            //    // (Expression)
            //    default: {
            //            var result = new ParenthesizedExpression();
            //            result.startLocation = lexer.read().startLocation; // (
            //            result.body = parseExpression();
            //            expectToken(TokenType.rParam, ErrorCode.expectedRParam);
            //            result.endLocation = lexer.current.endLocation;
            //            return result;
            //        }
            //}

            throw new Unreachable();

        }

        /// <summary>
        /// Looks ahead (without consuming tokens) to decide how a leading `(`
        /// should be parsed.
        /// </summary>
        /// <returns>
        /// <c>State.on</c> when a matching `)` is immediately followed by `->`
        /// (lambda parameter list); <c>State.off</c> when the token after `)`
        /// can start an expression (looks like a type cast); otherwise
        /// <c>State.unset</c> (plain parenthesized expression, nested `(`, or
        /// end of file).
        /// </returns>
        private State followsWithLambdaOrTypeConversion() {
            lexer.mark();
            lexer.markRead(); // (
            while (true) {
                switch (lexer.markRead().type) {
                    case TokenType.rParam:
                        // ( ... ) -> : lambda parameter list.
                        if (lexer.markRead().type == TokenType.lambda) { // -> identifier
                            return State.on;
                        }
                        // ( ... ) Expression : looks like a cast.
                        return lexer.markCurrent.type.isExpressionStart() ? State.off : State.unset;
                    case TokenType.lParam:
                    case TokenType.eof:
                        // NOTE(review): a nested `(` bails out immediately, so
                        // lambdas/casts containing parentheses are not detected
                        // here — confirm this heuristic is intentional.
                        return State.unset;
                }
            }
        }

        /// <summary>
        /// Parses a lambda literal (`(params) -> body` or `x -> body`).
        /// </summary>
        /// <param name="parsedParameter">A single already-parsed parameter identifier, or null.</param>
        /// <remarks>
        /// NOTE(review): this is work-in-progress — the whole implementation is
        /// commented out and the method currently returns an empty
        /// <c>LambdaLiteral</c> without consuming any tokens.
        /// </remarks>
        private LambdaLiteral parseLambdaLiteral(Identifier parsedParameter) {

            // LambdaLiteral :
            //    ( LambdaParameterList ) -> LambdaBody
            //    Identifier -> LambdaBody

            // LambdaParameterList :
            //    LambdaParameter ...

            // LambdaParameter :
            //    ref? Type Identifier
            //    out? Type Identifier
            //    Identifier

            // LambdaBody :
            //    MethodBody
            //    Expression

            Debug.Assert(lexer.peek().type == TokenType.lParam || lexer.peek().type == TokenType.lambda);

            var result = new LambdaLiteral();

            //if (parsedParameter != null) {
            //    result.startLocation = parsedParameter.startLocation;
            //    result.parameters = new Parameter();
            //    result.parameters.name = parsedParameter;
            //} else if (lexer.peek().type == TokenType.lambda) {
            //    result.startLocation = lexer.peek().startLocation; // ->
            //} else {
            //    result.startLocation = lexer.read().startLocation; // (

            //    if (!readToken(TokenType.rParam)) {
            //        Parameter current = result.parameters = new Parameter();
            //        if (readToken(TokenType.@ref)) {
            //            current.variableType = VariableType.refParameter;
            //            current.type = parseType();
            //            current.name = expectIdentifier();
            //        } else if (readToken(TokenType.@out)) {
            //            current.variableType = VariableType.outParameter;
            //            current.type = parseType();
            //            current.name = expectIdentifier();
            //        } else {
            //            current.type = parseType();
            //            if (lexer.peek().type == TokenType.identifier) {
            //                current.name = parseIdentifier();
            //            } else {
            //                current.name = toIdentifier(current.type);
            //                current.type = null;
            //            }
            //        }

            //        bool hasType = current.type != null;
            //        Variable last = result.parameters;
            //        while (readToken(TokenType.comma)) {
            //            current = new Parameter();
            //            if (hasType) {
            //                if (readToken(TokenType.@ref)) {
            //                    current.variableType = VariableType.refParameter;
            //                } else if (readToken(TokenType.@out)) {
            //                    current.variableType = VariableType.outParameter;
            //                }
            //                current.type = parseType();
            //            }
            //            current.name = expectIdentifier();
            //            last = last.next = current;
            //        }

            //        if (!readToken(TokenType.rParam)) {
            //            expectToken(TokenType.rParam, ErrorCode.expectedRParam);

            //            // Skip the rest of the parameter section.
            //            do {
            //                lexer.read();
            //            } while (lexer.peek().type != TokenType.eof && lexer.peek().type != TokenType.rParam && lexer.peek().type != TokenType.lambda && lexer.peek().type != TokenType.lBrace && lexer.peek().type != TokenType.rBrace);
            //        }
            //    }

            //}

            //Debug.Assert(lexer.peek().type == TokenType.lambda);
            //lexer.read(); //->
            //if (readToken(TokenType.lBrace)) {
            //    result.body = new ToplevelBlock();
            //    parseBlockBody(result.body);
            //} else {
            //    result.returnBody = parseExpression();
            //}

            return result;

        }

        /// <summary>
        /// Indicates the context in which a type expression is being parsed.
        /// </summary>
        private enum TypeUsage {
            /// <summary>A pure type position: `&lt;`, `[` and `*` always belong to the type.</summary>
            type,
            /// <summary>Inside a general expression, where `&lt;` and `*` may instead be operators.</summary>
            expression,
            /// <summary>A declaration context. (Spelling `declartion` is kept — renaming would break callers; not referenced in this chunk.)</summary>
            declartion,
            /// <summary>The target type of a `new` expression, where `[` starts the allocation arguments.</summary>
            @new
        }

        /// <summary>
        /// Parses a type expression.
        /// </summary>
        /// <param name="typeUsage">The context in which the type is being parsed.</param>
        /// <returns>The parsed type, or <c>Expression.empty</c> after an error.</returns>
        private Expression parseType(TypeUsage typeUsage = TypeUsage.type) {

            // Type :
            //   PredefineType
            //   Identifier
            //   Type GenericArgumentList
            //   Type . Identifier GenericArgumentList?
            //   Type *
            //   Type [ ]

            // GenericArgumentList :
            //   < TypeList >

            var tokenType = lexer.peek().type;

            if (tokenType.isPredefinedType()) {
                return parsePredefinedType(typeUsage);
            }

            if (tokenType == TokenType.identifier) {
                return parseType(parseIdentifier(), typeUsage);
            }

            // Neither a predefined type nor an identifier: report the error and
            // recover with an empty placeholder expression.
            Compiler.error(ErrorCode.expectedType, String.Format("语法错误：应输入类型；“{0}”不是类型", tokenType.getName()), lexer.peek());
            return Expression.empty;

        }

        /// <summary>
        /// Parses a type that starts with a predefined type keyword, then folds
        /// any trailing array (`[]`) and pointer (`*`) suffixes.
        /// </summary>
        /// <param name="typeUsage">The context in which the type is being parsed.</param>
        /// <returns>The parsed type expression.</returns>
        private Expression parsePredefinedType(TypeUsage typeUsage = TypeUsage.type) {

            // PredefinedType :
            //   int
            //   float
            //   var
            //   dynamic
            //   ...
            //   PredefinedType []
            //   PredefinedType *

            Debug.Assert(lexer.peek().type.isPredefinedType());

            Expression result = new PredefinedTypeLiteral() {
                startLocation = lexer.read().startLocation,
                type = lexer.current.type,
            };

            // Keep wrapping the parsed type while suffixes follow.
            while (true) {
                var next = lexer.peek().type;

                if (next == TokenType.lBrack && typeUsage != TypeUsage.@new) {
                    // Array suffix. Inside a `new` expression the brackets
                    // belong to the allocation, so they are not consumed here.
                    lexer.read(); // [

                    // NOTE: rank (`[,,]`) parsing is currently disabled on this
                    // path; only plain `[]` is accepted.
                    expectToken(TokenType.rBrack, ErrorCode.expectedRBrack);
                    result = new ArrayTypeExpression() {
                        elementType = result,
                        endLocation = lexer.current.endLocation
                    };
                } else if (next == TokenType.mul) {
                    // Pointer suffix.
                    lexer.read();
                    result = new PtrTypeExpression() {
                        elementType = result,
                        endLocation = lexer.current.endLocation
                    };
                } else {
                    return result;
                }
            }

        }

        /// <summary>
        /// 解析以标识符开头的类型。
        /// </summary>
        /// <param name="parsedIdentifier"></param>
        /// <returns></returns>
        /// <summary>
        /// Parses a type that begins with an already-consumed identifier,
        /// including chained member accesses (`A.B&lt;...&gt;[]` and the like).
        /// </summary>
        /// <param name="parsedIdentifier">The leading identifier.</param>
        /// <param name="typeUsage">The context in which the type is being parsed.</param>
        /// <returns>The parsed type expression.</returns>
        private Expression parseType(Identifier parsedIdentifier, TypeUsage typeUsage) {
            var result = parseTypeExpression(parsedIdentifier, typeUsage);

            // Fold each `.Identifier GenericArgs?` segment, then any array or
            // pointer suffixes that follow it.
            while (readToken(TokenType.period)) {
                var member = new MemberCallExpression();
                member.target = result;
                member.argument = parseGenericTypeExpression(expectIdentifier(), typeUsage);
                result = parseArrayTypeExpression(member, typeUsage);
            }

            return result;
        }

        /// <summary>
        /// 尝试组合当前类型为复合类型表达式。
        /// </summary>
        /// <param name="parsedIdentifier"></param>
        /// <param name="typeUsage"></param>
        /// <returns></returns>
        /// <summary>
        /// Combines an identifier with its generic arguments and any array or
        /// pointer suffixes into a single type expression.
        /// </summary>
        /// <param name="parsedIdentifier">The leading identifier.</param>
        /// <param name="typeUsage">The context in which the type is being parsed.</param>
        /// <returns>The composed type expression.</returns>
        private Expression parseTypeExpression(Identifier parsedIdentifier, TypeUsage typeUsage) {
            // First fold generic arguments, then array/pointer suffixes.
            var generic = parseGenericTypeExpression(parsedIdentifier, typeUsage);
            return parseArrayTypeExpression(generic, typeUsage);
        }

        /// <summary>
        /// 尝试组合当前类型为数组类型。
        /// </summary>
        /// <param name="parsed"></param>
        /// <param name="typeUsage"></param>
        /// <returns></returns>
        /// <summary>
        /// Wraps an already-parsed type with any trailing array (`[]`) and
        /// pointer (`*`) suffixes, disambiguating against index accesses and
        /// multiplication based on <paramref name="typeUsage"/>.
        /// </summary>
        /// <param name="parsed">The type expression parsed so far.</param>
        /// <param name="typeUsage">The context in which the type is being parsed.</param>
        /// <returns>The (possibly wrapped) type expression.</returns>
        private Expression parseArrayTypeExpression(Expression parsed, TypeUsage typeUsage) {

            while (true) {
                switch (lexer.peek().type) {
                    case TokenType.lBrack:

                        // Array types are not parsed inside a `new` expression
                        // (there `[` introduces the allocation arguments).
                        if (typeUsage == TypeUsage.@new) {
                            return parsed;
                        }
                        if (typeUsage != TypeUsage.type) {

                            // Decide whether `[` starts an index access or an
                            // array type: an array type contains only commas
                            // before the closing `]`.
                            lexer.mark();
                            do {
                                lexer.markRead();
                            } while (lexer.markPeek().type == TokenType.comma);
                            if (lexer.markPeek().type != TokenType.rBrack) {
                                goto default;
                            }
                        }

                        lexer.read(); // [

                        // Count the array rank (`[,,]` => 3).
                        // NOTE(review): rank is computed but never stored — the
                        // `rank` member below is commented out; confirm whether
                        // multi-dimensional ranks should be preserved.
                        int rank = 1;
                        while (readToken(TokenType.comma))
                            rank++;

                        expectToken(TokenType.rBrack, ErrorCode.expectedRBrack);
                        parsed = new ArrayTypeExpression() {
                            elementType = parsed,
                            //rank = rank,
                            endLocation = lexer.current.endLocation
                        };
                        continue;
                    case TokenType.mul:
                        if (typeUsage == TypeUsage.expression) {
                            lexer.mark();
                            lexer.markRead();

                            // If an expression start follows, `*` is the
                            // multiplication operator, not a pointer suffix.
                            if (lexer.markRead().type.isExpressionStart()) {
                                goto default;
                            }
                        }
                        parsed = new PtrTypeExpression() {
                            elementType = parsed,
                            endLocation = lexer.read().endLocation
                        };
                        continue;
                    default:
                        return parsed;
                }
            }

        }

        /// <summary>
        /// 尝试组合当前类型为泛型。
        /// </summary>
        /// <param name="parsed"></param>
        /// <param name="typeUsage"></param>
        /// <returns></returns>
        /// <summary>
        /// Wraps an identifier with generic arguments (`Identifier&lt;Type, ...&gt;`)
        /// when a `&lt;` follows and is determined to start a generic argument list.
        /// </summary>
        /// <param name="parsedIdentifier">The identifier already parsed.</param>
        /// <param name="typeUsage">The context in which the type is being parsed.</param>
        /// <returns>A <c>GenericTypeExpression</c>, or <paramref name="parsedIdentifier"/> unchanged.</returns>
        private Expression parseGenericTypeExpression(Identifier parsedIdentifier, TypeUsage typeUsage) {

            if (lexer.peek().type == TokenType.lt) {

                // Decide whether `<` is a less-than operator or the start of a
                // generic argument list; in a pure type context it is always the
                // latter, otherwise look ahead on the marked stream to check.
                if (typeUsage != TypeUsage.type) {
                    lexer.mark();
                    if (!markReadGenericTypeExpression()) {
                        return parsedIdentifier;
                    }
                }

                lexer.read(); // <

                var result = new GenericTypeExpression();
                result.elementType = parsedIdentifier;
                result.genericArguments = new List<Expression>();
                do {
                    // An omitted argument (before `,` or `>`) is stored as null.
                    // Note: `continue` jumps to the do-while condition, which
                    // consumes the separating comma.
                    if (lexer.peek().type == TokenType.comma || lexer.peek().type == TokenType.gt) {
                        result.genericArguments.Add(null);
                        continue;
                    }
                    result.genericArguments.Add(parseType());
                } while (readToken(TokenType.comma));

                expectToken(TokenType.gt, ErrorCode.expectedGt);
                result.endLocation = lexer.current.endLocation;
                return result;
            }

            return parsedIdentifier;
        }

        /// <summary>
        /// 判断一个类型之后是否存在泛型参数。
        /// </summary>
        /// <returns></returns>
        /// <summary>
        /// Speculatively reads ahead on the marked stream to decide whether a
        /// `&lt;` after a type starts a generic argument list.
        /// </summary>
        /// <returns><c>true</c> if the tokens up to a matching `&gt;` form generic arguments.</returns>
        private bool markReadGenericTypeExpression() {

            Debug.Assert(lexer.markPeek().type == TokenType.@lt);

            do {

                lexer.markRead(); // <, ,

                // An immediate > ends the list (omitted argument allowed).
                if (lexer.markPeek().type == TokenType.gt) {
                    break;
                }

                // If what follows is not a type, this is not a generic argument list.
                if (!markReadType()) {
                    return false;
                }

            } while (lexer.markPeek().type == TokenType.comma);

            // A closing > confirms a well-formed generic argument list.
            return lexer.markRead().type == TokenType.gt;
        }

        /// <summary>
        /// 判断一个类型之后是否是数组类型。
        /// </summary>
        /// <returns></returns>
        /// <summary>
        /// Speculatively reads ahead on the marked stream to decide whether a
        /// `[` after a type starts an array type (`[]`, `[,]`, ...).
        /// </summary>
        /// <returns><c>true</c> if only commas appear before the closing `]`.</returns>
        private bool markReadArrayTypeExpression() {

            Debug.Assert(lexer.markPeek().type == TokenType.lBrack);

            lexer.markRead(); // [

            // An array type may contain nothing but rank separators before `]`.
            var token = lexer.markRead();
            while (token.type == TokenType.comma) {
                token = lexer.markRead();
            }

            return token.type == TokenType.rBrack;

        }

        /// <summary>
        /// Speculatively reads a full type (identifier or predefined type, plus
        /// generic, array, pointer and member suffixes) from the marked stream.
        /// </summary>
        /// <returns><c>true</c> if the marked tokens form a type.</returns>
        private bool markReadType() {
            var tokenType = lexer.markRead().type;

            if (tokenType == TokenType.identifier) {
                // An identifier may carry generic arguments.
                if (lexer.markPeek().type == TokenType.lt && !markReadGenericTypeExpression()) {
                    return false;
                }
            } else if (!tokenType.isPredefinedType()) {
                return false;
            }

            // Consume any combination of array, pointer and member suffixes.
            while (true) {
                var next = lexer.markPeek().type;

                if (next == TokenType.lBrack) {
                    if (!markReadArrayTypeExpression()) {
                        return false;
                    }
                } else if (next == TokenType.mul) {
                    lexer.markRead();
                } else if (next == TokenType.period) {
                    lexer.markRead(); // .
                    if (lexer.markRead().type != TokenType.identifier) {
                        return false;
                    }
                } else {
                    return true;
                }
            }

        }

        /// <summary>
        /// Determines whether the next token can begin an expression.
        /// </summary>
        private bool followsWithExpression() {
            var nextTokenType = lexer.peek().type;
            return nextTokenType.isExpressionStart();
        }

        #endregion

    }

}
