﻿using System;
using System.Collections.Generic;
using System.Linq;
using ParserGenerator.Exceptions;
using ParserGenerator.Extensions;
using ParserGenerator.GrammarConstruction;
using ParserGenerator.ScannerConstruction;
using ParserGenerator.SyntacticAnalysis;

namespace ParserGenerator.GrammarAnalysis
{
    /// <summary>
    /// Walks a scanner automaton and decides, for each reachable state, which token a
    /// longest-match scanner should accept when several lexical rules compete — i.e.
    /// whether a token already matched on the path so far ("shorter" token) should win,
    /// or scanning should continue toward a token accepted deeper in the automaton
    /// ("longer" token). Decisions are published in <see cref="ScannerAccept"/>;
    /// questions the declared precedences cannot answer are reported via
    /// <see cref="Conflicts"/>.
    /// </summary>
    [Serializable]
    internal sealed class LexicalPrecedenceResolver
    {
        // Automaton under analysis; also supplies the token table (Tokens) and the
        // user-declared precedence rules (LexicalPrecedences) consulted below.
        private readonly ScannerAutomaton _scannerAutomaton;
        // Tokens of interest: GetLongerTokens intersects each state's reduction tokens
        // with this set, so tokens outside it never participate in resolution.
        private readonly HashSet<GrammarTerminalSymbol> _tokens;
        // Memoization for ResolveState: states already visited under a given precedence
        // profile. Prevents re-processing and terminates the recursion on cyclic automata.
        private readonly Dictionary<LexicalPrecedenceProfile, HashSet<ScannerAutomatonState>> _profileMap;
        // Precedence questions that could not be resolved; populated by RecordConflict.
        public List<LexicalPrecedenceConflict> Conflicts { get; }
        // Resolved decision per state: the token the scanner should accept there.
        // Only states where a "longer" token was explicitly chosen get an entry.
        public Dictionary<ScannerAutomatonState, GrammarTerminalSymbol> ScannerAccept { get; }

        /// <summary>
        /// Initializes a resolver over <paramref name="scannerAutomaton"/>, restricting
        /// the analysis to the token set <paramref name="tokens"/>.
        /// </summary>
        /// <param name="scannerAutomaton">Automaton whose states are to be resolved.</param>
        /// <param name="tokens">Tokens participating in precedence resolution.</param>
        /// <exception cref="ArgumentNullException">Either argument is null.</exception>
        public LexicalPrecedenceResolver(ScannerAutomaton scannerAutomaton, HashSet<GrammarTerminalSymbol> tokens)
        {
            if (scannerAutomaton == null)
                throw new ArgumentNullException(nameof(scannerAutomaton));
            if (tokens == null)
                throw new ArgumentNullException(nameof(tokens));
            _scannerAutomaton = scannerAutomaton;
            _tokens = tokens;
            _profileMap = new Dictionary<LexicalPrecedenceProfile, HashSet<ScannerAutomatonState>>();
            Conflicts = new List<LexicalPrecedenceConflict>();
            ScannerAccept = new Dictionary<ScannerAutomatonState, GrammarTerminalSymbol>();
        }

        /// <summary>
        /// Looks up the terminal symbol produced by <paramref name="rule"/> in the
        /// automaton's token table.
        /// </summary>
        /// <param name="rule">Lexical rule to resolve to a token.</param>
        /// <returns>The token the rule yields; never null.</returns>
        /// <exception cref="ArgumentNullException"><paramref name="rule"/> is null.</exception>
        /// <exception cref="GrammarErrorException">
        /// The rule yields no token; the exception carries the rule's source location.
        /// </exception>
        private GrammarTerminalSymbol GetTokenFromRule(LexicalRuleDeclarator rule)
        {
            if (rule == null)
                throw new ArgumentNullException(nameof(rule));
            GrammarTerminalSymbol token = _scannerAutomaton.Tokens.TryGetToken(rule, false);
            if (token == null)
            {
                // Surface a grammar error pinned to the offending rule's location
                // rather than failing with a bare null downstream.
                GrammarErrorException e = new GrammarErrorException(SR.LexicalRuleYieldNoToken);
                e.AddLocation(rule.Context);
                throw e;
            }
            return token;
        }

        /// <summary>
        /// Collects the tokens accepted ("reduced") at <paramref name="state"/>, filtered
        /// to the resolver's token set. These are the candidate "longer" tokens a scanner
        /// could still produce by continuing from this state.
        /// </summary>
        /// <param name="state">State whose reductions are inspected.</param>
        /// <returns>A fresh set; may be empty if the state accepts nothing of interest.</returns>
        /// <exception cref="ArgumentNullException"><paramref name="state"/> is null.</exception>
        private HashSet<GrammarTerminalSymbol> GetLongerTokens(ScannerAutomatonState state)
        {
            if (state == null)
                throw new ArgumentNullException(nameof(state));
            HashSet<GrammarTerminalSymbol> longerTokens = new HashSet<GrammarTerminalSymbol>();
            foreach (LexicalRuleDeclarator rule in state.Reductions)
            {
                GrammarTerminalSymbol token = GetTokenFromRule(rule);
                longerTokens.Add(token);
            }
            // Drop tokens outside the set this resolver was asked to consider.
            longerTokens.IntersectWith(_tokens);
            return longerTokens;
        }

        // Records an unresolved precedence question for later reporting.
        private void RecordConflict(ScannerAutomatonState state, LexicalPrecedenceProfile profile)
        {
            LexicalPrecedenceConflict conflict = new LexicalPrecedenceConflict(state, profile);
            Conflicts.Add(conflict);
        }

        /// <summary>
        /// Recursively resolves precedence along every transition out of
        /// <paramref name="state"/>. For each successor it builds a
        /// <see cref="LexicalPrecedenceProfile"/> (shorter tokens seen on the path,
        /// the currently winning shorter token, and the successor's longer tokens),
        /// memoizes visits per profile in <c>_profileMap</c>, and either keeps the
        /// shorter token, selects a longer one (recorded in <see cref="ScannerAccept"/>),
        /// or records a conflict.
        /// </summary>
        /// <param name="state">State whose outgoing transitions are processed.</param>
        /// <param name="shorterTokens">Tokens already matchable on the path to here.</param>
        /// <param name="shorterToken">Currently winning shorter token, or null if none.</param>
        /// <exception cref="ArgumentNullException">
        /// <paramref name="state"/> or <paramref name="shorterTokens"/> is null.
        /// (<paramref name="shorterToken"/> may legitimately be null.)
        /// </exception>
        private void ResolveState(ScannerAutomatonState state, HashSet<GrammarTerminalSymbol> shorterTokens, GrammarTerminalSymbol shorterToken)
        {
            if (state == null)
                throw new ArgumentNullException(nameof(state));
            if (shorterTokens == null)
                throw new ArgumentNullException(nameof(shorterTokens));
            foreach (var t in state.Transitions)
            {
                HashSet<GrammarTerminalSymbol> longerTokens = GetLongerTokens(t.Value.Target);
                LexicalPrecedenceProfile profile = new LexicalPrecedenceProfile(shorterTokens, shorterToken, longerTokens);
                HashSet<ScannerAutomatonState> entry = _profileMap.TryGetValueOrCreate(profile);
                // Skip targets already processed under this exact profile (memoization
                // doubles as the cycle guard for looping automata).
                if (!entry.Contains(t.Value.Target))
                {
                    GrammarTerminalSymbol longerToken;
                    // NOTE(review): && binds tighter than ||, so this reads as
                    // "no longer candidates, OR (a shorter token exists AND the declared
                    // precedences say it beats all longer candidates)". That appears
                    // intended, but explicit parentheses would make it unmistakable.
                    if (longerTokens.Count == 0 || shorterToken != null && _scannerAutomaton.LexicalPrecedences.CheckShorterToken(shorterToken, longerTokens))
                        longerToken = shorterToken;
                    else
                    {
                        // The shorter token does not win outright: ask the declared
                        // precedences to pick a longer token instead.
                        longerToken = _scannerAutomaton.LexicalPrecedences.ChooseLongerToken(shorterTokens, longerTokens);
                        if (longerToken != null)
                            ScannerAccept[t.Value.Target] = longerToken;
                        else if (entry.Count == 0)
                            // No decision possible; report once per profile (only on
                            // the first state recorded under it).
                            RecordConflict(t.Value.Target, profile);
                    }
                    entry.Add(t.Value.Target);
                    // NOTE(review): this mutates the same set instance that was passed
                    // to the LexicalPrecedenceProfile constructor above — and that
                    // profile is already a key in _profileMap. This is only safe if
                    // the profile copies its inputs (or hashes by identity); verify in
                    // LexicalPrecedenceProfile before touching this ordering.
                    longerTokens.UnionWith(shorterTokens);
                    // Recursion depth is bounded by automaton path length; presumably
                    // fine for generated scanners, but a very deep automaton could
                    // overflow the stack — TODO confirm if that is a concern here.
                    ResolveState(t.Value.Target, longerTokens, longerToken);
                }
            }
        }

        /// <summary>
        /// Entry point: resolves the automaton starting from its first start state with
        /// an empty shorter-token context.
        /// </summary>
        public void Resolve()
        {
            // NOTE(review): only the FIRST start state is processed. If the automaton
            // can have multiple starts, the others are never resolved — confirm that a
            // single start (or a shared initial state) is an invariant here.
            ScannerAutomatonStart start = _scannerAutomaton.Starts.Values.First();
            ScannerAutomatonState s0 = start.State;
            ResolveState(s0, new HashSet<GrammarTerminalSymbol>(), null);
        }
    }
}
