#include "FuzeDocumentOperation.h"

#include "ReporterWithReducibleSeverity.h"
#include "InputSupplier.h"
#include "simodo/inout/convert/functions.h"
#include "simodo/interpret/builtins/hosts/base/BaseAnalyzer.h"
#include "simodo/interpret/builtins/hosts/fuze/FuzeAnalyzer.h"
#include "simodo/interpret/builtins/modules/LexicalParametersModule.h"
#include "simodo/interpret/builtins/modules/AstFormationModule.h"
#include "simodo/inout/token/RefBufferStream.h"

#include "simodo/lsp-client/SimodoCommandResult.h"
#include "simodo/lsp-client/CompletionItemKind.h"
#include "simodo/lsp-client/LspEnums.h"
#include "simodo/utility/grammatize.h"
#include "simodo/inout/reporter/StreamReporter.h"

#include "simodo/interpret/SemanticOperationsEnumsLoader.h"

using namespace simodo;

/// \brief Builds the fuze-specific document operation on top of the generic
///        script operation.
/// \param doc        Document context provided by the LSP server.
/// \param factory    Factory supplying shared settings (grammar directory).
/// \param languageId LSP language identifier of the document.
FuzeDocumentOperation::FuzeDocumentOperation(lsp::DocumentContext & doc, const DocumentOperationFactory & factory, const std::string & languageId) 
        : ScriptDocumentOperation(doc, factory, languageId)
        , _doc(doc) 
        , _grammar_dir(factory.grammar_dir())
{
    // Host/operation enums are loaded once per document from the grammar
    // directory; they are later fed into AstFormationModule (see analyze()).
    _hosts_and_operations = interpret::loadSemanticOperationsEnums(_grammar_dir);
}

/// \brief Parses the fuze document text and, on a successful parse, runs the
///        semantic analysis pipeline that fills _grammar, _productions,
///        _directions, _files and _semantic_data.
/// \param text     Full document text (UTF-16).
/// \param reporter Diagnostics sink for parse and analysis messages.
/// \return Always true: failures surface only through the reporter, a failed
///         parse is not treated as an operation failure.
bool FuzeDocumentOperation::analyze(const std::u16string & text, inout::Reporter_abstract & reporter)
{
    // Wrapper allows lowering the severity of messages produced after the
    // main build phase (see reduceSeverity() below).
    ReporterWithReducibleSeverity   m(reporter);
    ast::FormationFlow              flow;
    InputSupplier                   input_supplier(_doc.server(), _doc.file_name(), text);
    SemanticDataCollector           data_collector;
    parser::FuzeRdp                 fuze(m, _doc.file_name(), flow, input_supplier, data_collector);
    inout::RefBufferStream          buffer_stream(text.data());

    bool ok = fuze.parse(buffer_stream);

    // Remember every file pulled in by the parse; used by checkDependency().
    _files = flow.tree().files();

    if (ok) {
        interpret::builtins::FuzeAnalyzer * analyzer = nullptr;

        _grammar = parser::Grammar();

        // Phase 1: run the FuzeAnalyzer over the whole tree to build the
        // grammar (LR1 tables) and collect productions/directions.
        // NOTE(review): `analyzer` is allocated with `new` and handed to the
        // Interpret constructor — presumably Interpret/_loom takes ownership;
        // confirm, since there is no visible delete here.
        {
            interpret::Interpret    inter(interpret::InterpretType::Analyzer, m, _loom, flow.tree().files(), flow.tree().root(), 
                                        {
                                            analyzer = new interpret::builtins::FuzeAnalyzer(
                                                                        &inter,
                                                                        data_collector,
                                                                        fs::path(_doc.file_name()).stem().string(),
                                                                        _grammar,
                                                                        parser::TableBuildMethod::LR1
                                                                        ),
                                        });

            _loom.stretch(&inter);
            _loom.finish();

            // Take over the analysis results before the analyzer goes away.
            analyzer->swapProductions(_productions);
            analyzer->swapDirections(_directions);
        }

        // Messages from the secondary passes below are less important.
        m.reduceSeverity();

        // Phase 2: analyze each lexical handler with the `lex` module exposed.
        // NOTE(review): the interprets/analyzers are raw `new`s passed to
        // _loom.dock(inter, true) — presumably the loom owns and frees them;
        // confirm.
        for(const auto & [name, node] : _grammar.handlers)
        {
            interpret::Interpret *              inter       = new interpret::Interpret(interpret::InterpretType::Analyzer, m, _loom, flow.tree().files(), node);
            interpret::builtins::BaseAnalyzer * lexAnalyzer = new interpret::builtins::BaseAnalyzer(inter, data_collector);
            std::shared_ptr<interpret::builtins::LexicalParametersModule> 
                                                lex         = std::make_shared<interpret::builtins::LexicalParametersModule>(_grammar.lexical);

            /// \todo Reconsider the odd pattern of passing an object to its own method.
            /// PVS Studio: warn V678 An object is used as an argument to its own method.
            /// Consider checking the first actual argument of the 'instantiate' function.
            lexAnalyzer->importNamespace(u"lex", lex->instantiate(lex), inout::null_token_location);
            inter->instantiateSemantics({lexAnalyzer});
            _loom.dock(inter, true);
            _loom.stretch(inter);
            /// \note Parallel processing removed to avoid having to control a race over data_collector
            _loom.finish();
        }

        // Phase 3: analyze each rule's reduce action with the `ast` module exposed.
        for(const parser::GrammarRule & r : _grammar.rules) 
        {
            interpret::Interpret *              inter       = new interpret::Interpret(interpret::InterpretType::Analyzer, m, _loom, flow.tree().files(), r.reduce_action);
            interpret::builtins::BaseAnalyzer * astAnalyzer = new interpret::builtins::BaseAnalyzer(inter, data_collector);
            std::shared_ptr<interpret::builtins::AstFormationModule> 
                                                ast         = std::make_shared<interpret::builtins::AstFormationModule>(_hosts_and_operations);

            /// \todo Reconsider the odd pattern of passing an object to its own method.
            /// PVS Studio: warn V678 An object is used as an argument to its own method.
            /// Consider checking the first actual argument of the 'instantiate' function.
            astAnalyzer->importNamespace(u"ast", ast->instantiate(ast), inout::null_token_location);
            inter->instantiateSemantics({astAnalyzer});
            _loom.dock(inter, true);
            _loom.stretch(inter);
            /// \note Parallel processing removed to avoid having to control a race over data_collector
            _loom.finish();
        }

        /// \note Parallel processing removed to avoid having to control a race over data_collector
        // _loom.wait();

        // Publish the collected semantic data for the query methods.
        data_collector.swap(_semantic_data);
    }

    return true;
}

bool FuzeDocumentOperation::checkDependency(const std::string & uri) const
{
    auto it = std::find(_files.begin(), _files.end(), uri);
    if (it != _files.end())
        return true;

    return ScriptDocumentOperation::checkDependency(uri);
}

/// \brief Handles the fuze-specific grammar report commands.
///
/// For the SLR/LR1 report commands the document text is re-grammatized with
/// the requested table build method, and the textual report is returned as a
/// plainText command result; any other command is delegated to the base class.
/// \param command_name Command identifier; empty yields an empty response.
/// \param text         Current document text (UTF-16), parsed from scratch.
variable::Value FuzeDocumentOperation::produceSimodoCommandResponse(const std::u16string & command_name, std::u16string text) const 
{
    if (command_name.empty())
        return {};

    parser::TableBuildMethod method = parser::TableBuildMethod::none;

    if (command_name == SLR_method_report)
        method = parser::TableBuildMethod::SLR;
    else if (command_name == LR1_method_report)
        method = parser::TableBuildMethod::LR1;
    else    
        return ScriptDocumentOperation::produceSimodoCommandResponse(command_name, text);

    // Both the report and any diagnostics are gathered into `out`, which
    // becomes the body of the response below.
    inout::RefBufferStream  buffer_stream(text.data());
    std::ostringstream      out;
    inout::StreamReporter   m(out);
    parser::Grammar         grammar;

    utility::grammatize(    _doc.file_name(),
                            buffer_stream,
                            out,
                            m,
                            "",   //json_file_name,
                            "",   //st_dot_file_name,
                            method, // grammar_builder_method,
                            "",   //dot_file_name,
                            true, //need_state_transitions_info,
                            true, //need_rules_info,
                            false,//need_st_info,
                            false,//need_time_intervals,
                            true, //need_silence,
                            true, //need_build_grammar,
                            false,//need_load_grammar
                            true, //need_analyze_handles
                            false,//need_analyze_inserts
                            grammar
                        );

    // Wrap the accumulated text into the simodo command-result envelope.
    return variable::Object {{
                {u"uri",            inout::toU16(_doc.file_name())},
                {u"commandResult",  variable::Object {{
                    {u"id",         u"grammar-report"},
                    {u"title",      u"'" + fs::path(_doc.file_name()).stem().u16string() + u"' " + command_name},
                    {u"type",       static_cast<int64_t>(lsp::SimodoCommandReportType::plainText)},
                    {u"text",       inout::encodeSpecialChars(inout::toU16(out.str()))},
                }}},
            }};
}

/// \brief Builds a plaintext hover for the grammar symbol or control token
///        under the given position of the main document.
variable::Value FuzeDocumentOperation::produceHoverResponse(const lsp::Position & pos) const
{
    // A token is under the cursor when it sits on the cursor's line in the
    // main document (uri_index 0) and the column falls inside [start, end).
    auto under_cursor = [&pos](const inout::Token & tk) {
        return tk.location().range().start().line() == pos.line()
            && tk.location().uri_index() == 0
            && tk.location().range().start().character() <= pos.character()
            && tk.location().range().end().character() > pos.character();
    };

    // Wraps hover text and the token's range into the LSP hover shape.
    auto hover_response = [this](const inout::Token & tk, const std::u16string & text) -> variable::Value {
        return variable::Object {{
                    {u"contents",   variable::Object {{
                        {u"kind",       u"plaintext"},
                        {u"value",      inout::encodeSpecialChars(text)},
                    }}},
                    {u"range",      _doc.makeRange(tk.location().range())},
                }};
    };

    for(const auto & [p_key, p_pair] : _productions) {
        const auto & [prod, pattern] = p_pair;

        if (under_cursor(prod))
            return hover_response(prod, makeHover(prod));

        for(const inout::Token & t : pattern)
            if (under_cursor(t))
                return hover_response(t, makeHover(t));
    }

    for(const inout::Token & dt : _directions)
        if (under_cursor(dt))
            return hover_response(dt, dt.lexeme() + u" - управляющий символ предотвращения несогласованности грамматики типа R|S");

    // NOTE: a commented-out hover lookup over _refs used to live here;
    // restore it from history if reference hovers are reintroduced.

    return ScriptDocumentOperation::produceHoverResponse(pos);
}

/// \brief Declaration and definition coincide for fuze grammar productions,
///        so "go to declaration" simply reuses the definition lookup.
variable::Value FuzeDocumentOperation::produceGotoDeclarationResponse(const lsp::Position & pos) const
{
    return produceGotoDefinitionResponse(pos);
}

/// \brief Resolves "go to definition" for the symbol under the cursor.
///
/// Looks for a production name or a pattern member at the given position in
/// the main document (uri_index 0) and returns the location of the production
/// that defines that symbol. When the cursor is on a symbol that has no
/// defining production (a terminal), an empty value is returned so the client
/// gets "no definition" rather than a fallback answer.
variable::Value FuzeDocumentOperation::produceGotoDefinitionResponse(const lsp::Position & pos) const
{
    // Cursor hit test: same line in the main document, column in [start, end].
    auto under_cursor = [&pos](const inout::Token & tk) {
        return tk.location().range().start().line() == pos.line()
            && tk.location().uri_index() == 0
            && tk.location().range().start().character() <= pos.character()
            && tk.location().range().end().character() >= pos.character();
    };

    // Maps a symbol name to the location of the production that defines it;
    // empty value when the name has no production (terminal symbol). This
    // replaces two previously duplicated copies of the same lookup.
    auto definition_of = [this](const std::u16string & lexeme) -> variable::Value {
        auto it = _productions.find(lexeme);
        if (it == _productions.end())
            return {};

        const inout::Token & target = it->second.first;
        std::u16string uri = target.location().uri_index() < _files.size() 
                                ? inout::toU16(_files[target.location().uri_index()])
                                : inout::toU16(_doc.file_name());
        return variable::Object {{
                    {u"uri",    uri},
                    {u"range",  _doc.makeRange(target.location().range())},
                }};
    };

    for(const auto & [p_key, p_pair] : _productions) {
        const auto & [prod, pattern] = p_pair;

        if (under_cursor(prod))
            return definition_of(prod.lexeme());

        for(const inout::Token & t : pattern)
            if (under_cursor(t))
                return definition_of(t.lexeme());
    }

    // NOTE: a commented-out lookup over _refs used to live here; restore it
    // from history if reference navigation is reintroduced.

    return ScriptDocumentOperation::produceGotoDefinitionResponse(pos);
}

/// \brief Returns completion items for the cursor position.
///
/// Inside any scope recorded in the semantic data the generic script
/// completion applies; outside of scopes the fuze-level items are offered:
/// every grammar production name plus the fuze keywords.
variable::Value FuzeDocumentOperation::produceCompletionResponse(const lsp::CompletionParams & completionParams) const
{
    _doc.server().log().debug("FuzeDocumentOperation::produceCompletionResponse: completionParams.position = [" 
                                + std::to_string(completionParams.position.line())
                                + ", " + std::to_string(completionParams.position.character()) + "]");

    bool inside_scope = false;

    for(const auto & [oped_scope, close_scope] : _semantic_data.scoped()) {
        if (oped_scope.uri_index() != 0)
            continue;

        _doc.server().log().debug("FuzeDocumentOperation::produceCompletionResponse: scope = [" 
                                    + std::to_string(oped_scope.range().start().line())
                                    + ", " + std::to_string(oped_scope.range().start().character()) + "] - ["
                                    + std::to_string(close_scope.range().end().line())
                                    + ", " + std::to_string(close_scope.range().end().character()) + "]");

        if (completionParams.position >= oped_scope.range().start()
         && completionParams.position <= close_scope.range().end()) {
            _doc.server().log().debug("FuzeDocumentOperation::produceCompletionResponse: found!");
            inside_scope = true;
            break;
        }
    }

    if (inside_scope)
        return ScriptDocumentOperation::produceCompletionResponse(completionParams);

    std::vector<variable::Value> completion_items;

    // _productions is ordered by key, so duplicates of a production name are
    // adjacent; remembering the previous key emits each name exactly once.
    std::u16string previous_key;
    for(const auto & [key, value] : _productions) {
        if (key == previous_key)
            continue;
        completion_items.push_back(makeCompletionItem(key, u"production", u"", int64_t(lsp::CompletionItemKind::Variable)));
        previous_key = key;
    }

    for(const char16_t * keyword : {u"main", u"include", u"remove"})
        completion_items.push_back(makeCompletionItem(keyword, u"keyword", u"", int64_t(lsp::CompletionItemKind::Keyword)));

    return completion_items;
}

/// \brief Produces the semantic-tokens response: a flat array of
///        [line, character, length, tokenType, tokenModifiers] quintuples,
///        one per token of the main document, in position order.
/// NOTE(review): values appear to be absolute line/character positions, while
/// the LSP wire format expects delta encoding — presumably the conversion
/// happens elsewhere (or the client tolerates this); confirm.
variable::Value FuzeDocumentOperation::produceSemanticTokensResponse() const
{
    // Token type index: 1 for punctuation/annotation/id/number, 0 otherwise.
    std::function<int64_t(inout::LexemeType)> makeType = [](inout::LexemeType type){
        switch (type)
        {
        case inout::LexemeType::Compound:
            return 0;
        case inout::LexemeType::Punctuation:
        case inout::LexemeType::Annotation:
        case inout::LexemeType::Id:
        case inout::LexemeType::Number:
            return 1;
        default:
            break;
        }
        return 0;
    };
    // Modifier bits: 1 for compound lexemes, 2 for anything non-punctuation.
    std::function<int64_t(inout::LexemeType)> makeModifiers = [](inout::LexemeType type){
        int64_t modifiers = 0;
        if (type == inout::LexemeType::Compound)
            modifiers += 1;
        else if (type != inout::LexemeType::Punctuation)
            modifiers += 2;
        return modifiers;
    };

    struct TokenInfo { int64_t length, type, modifiers; };
    // Orders (line, character) pairs lexicographically — same ordering as
    // the default std::less for a pair, spelled out explicitly.
    struct TokenComp {
        bool operator() (const std::pair<int64_t,int64_t> & x1, const std::pair<int64_t,int64_t> & x2) const {
            return x1.first < x2.first || (x1.first == x2.first && x1.second < x2.second);
        }
    };    

    // Keyed by position; map::insert keeps the FIRST entry for a position,
    // so the insertion order below (productions/patterns before directions)
    // decides which classification wins on a collision.
    std::map<std::pair<int64_t,int64_t>, TokenInfo, TokenComp> tokens;

    // Production names (type 0, modifiers 3) and their pattern members.
    for(const auto & [p_key, p_pair] : _productions) {
        const auto & [prod, pattern] = p_pair;
        if (prod.location().uri_index() == 0) {
            int64_t line      = static_cast<int64_t>(prod.location().range().start().line());
            int64_t character = static_cast<int64_t>(prod.location().range().start().character());
            int64_t length    = static_cast<int64_t>(prod.location().range().end().character()-character);
            int64_t modifiers = static_cast<int64_t>(3);
            tokens.insert({ {line, character}, {length, 0, modifiers}});

            for(const inout::Token & t : pattern) {
                int64_t line      = static_cast<int64_t>(t.location().range().start().line());
                int64_t character = static_cast<int64_t>(t.location().range().start().character());
                int64_t length    = static_cast<int64_t>(t.location().range().end().character()-character);
                int64_t modifiers = makeModifiers(t.type());
                tokens.insert({ {line, character}, {length, makeType(t.type()), modifiers}});
            }
        }
    }

    // Direction tokens (type 2, modifiers 2).
    for(const inout::Token & t : _directions) 
        if (t.location().uri_index() == 0) {
            int64_t line      = static_cast<int64_t>(t.location().range().start().line());
            int64_t character = static_cast<int64_t>(t.location().range().start().character());
            int64_t length    = static_cast<int64_t>(t.location().range().end().character()-character);
            int64_t modifiers = static_cast<int64_t>(2);
            tokens.insert({ {line, character}, {length, 2, modifiers}});
        }

    // for(const auto & [t, ref] : _refs) 
    //     if (t.location().uri_index() == 0) {
    //         int64_t line      = static_cast<int64_t>(t.location().range().start().line());
    //         int64_t character = static_cast<int64_t>(t.location().range().start().character());
    //         int64_t length    = static_cast<int64_t>(t.location().range().end().character()-character);
    //         int64_t modifiers = static_cast<int64_t>(1);
    //         tokens.insert({ {line, character}, {length, 7, modifiers}});
    //     }

    // Flatten the position-sorted map into the LSP data array.
    std::vector<variable::Value> sem_tokens;

    for(const auto & [p,t] : tokens) {
        sem_tokens.push_back(p.first);
        sem_tokens.push_back(p.second);
        sem_tokens.push_back(t.length);
        sem_tokens.push_back(t.type);
        sem_tokens.push_back(t.modifiers);
    }
    
    // Append the tokens produced by the script-level operation, if any.
    const variable::Value   script_response_value = ScriptDocumentOperation::produceSemanticTokensResponse();
    if (script_response_value.type() == variable::ValueType::Object) {
        const variable::Value & script_response_data = script_response_value.getObject()->find(u"data");
        if (script_response_data.type() == variable::ValueType::Array) {
            const std::vector<variable::Value> & script_response_array = script_response_data.getArray()->values();
            sem_tokens.insert(sem_tokens.end(), script_response_array.begin(), script_response_array.end());
        }
    }

    return variable::Object {{{u"data", sem_tokens}}};
}

/// \brief Produces the DocumentSymbol list: one symbol per grammar production
///        defined in the main document (uri_index 0).
///
/// Alternatives of the same production are merged: the symbol's detail string
/// concatenates every pattern separated by " |", and the symbol's range spans
/// from the production name to just before the next production (the last one
/// gets a large sentinel extent).
variable::Value FuzeDocumentOperation::produceDocumentSymbolsResponse() const
{
    std::vector<variable::Value> doc_symbols;
    std::multimap<uint64_t,std::pair<inout::Token, std::vector<inout::Token>>> prods_by_pos;

    // Order productions by their position in the main document; productions
    // coming from included files are not symbols of this document.
    for(const auto & [p_key, p_pair] : _productions)
        if (p_pair.first.location().uri_index() == 0)
            prods_by_pos.insert({p_pair.first.location().range().start().line()*uint64_t(1000),p_pair});

    // Guard on the filtered map, not on _productions: when every production
    // belongs to an included file the map is empty even though _productions
    // is not, and the previous `_productions.empty()` check still allowed
    // `prods_by_pos.begin()` to be dereferenced on an empty map (UB).
    if (prods_by_pos.empty())
        return doc_symbols;

    inout::Token    current_prod(inout::LexemeType::Empty, u"", inout::null_token_location);
    inout::Position current_prod_end;
    std::u16string  current_pattern_str;

    // (The old pre-fill of current_pattern_str from the first entry was dead:
    // it was unconditionally cleared on the first loop iteration.)
    for(const auto & [p_line, p_pair] : prods_by_pos) {
        const auto & [prod, pattern] = p_pair;

        if (prod.token() != current_prod.token()) {
            // A new production starts: flush the accumulated one, ending its
            // range just before the new production's first character.
            if (current_prod.type() != inout::LexemeType::Empty) {
                uint32_t end_line = prod.location().range().start().line();
                uint32_t end_char = prod.location().range().start().character();

                end_line = (end_char == 0) ? end_line - 1 : end_line;
                end_char = (end_char == 0) ? 1000 : end_char - 1;

                doc_symbols.push_back(variable::Object {{
                    {u"name",           current_prod.token()},
                    {u"detail",         current_prod.token() + u" =" + current_pattern_str},
                    {u"kind",           int64_t(lsp::SymbolKind::Variable)},
                    {u"range",          _doc.makeRange({current_prod.location().range().start(), inout::Position(end_line,end_char)})},
                    {u"selectionRange", _doc.makeRange(current_prod.location().range())},
                }});
            }
            current_prod = prod;
            current_pattern_str.clear();
        }

        if (!current_pattern_str.empty())       current_pattern_str += u" |";
        for(const inout::Token & t : pattern)   current_pattern_str += u" " + t.token();

        // Guard against an empty pattern (calling back() on it would be UB).
        if (!pattern.empty())
            current_prod_end = pattern.back().location().range().end();
    }

    // Flush the last production; its true extent is unknown, so a generous
    // sentinel end position is used.
    doc_symbols.push_back(variable::Object {{
        {u"name",           current_prod.token()},
        {u"detail",         current_prod.token() + u" =" + current_pattern_str},
        {u"kind",           int64_t(lsp::SymbolKind::Variable)},
        {u"range",          _doc.makeRange({current_prod.location().range().start(), inout::Position(current_prod_end.line()+1000,0)})},
        {u"selectionRange", _doc.makeRange(current_prod.location().range())},
    }});

    // \todo Using ScriptDocumentOperation::produceDocumentSymbolsResponse here
    //       triggers an assert, because that code is not designed for fuze
    //       semantics: while processing `lex { ... }` getFunctionBodyRange
    //       apparently sometimes cannot find the closing brace; it is also
    //       unclear what DocumentSymbols from ScriptDocumentOperation would
    //       even mean for fuze.
    // variable::Value script_response_value = ScriptDocumentOperation::produceDocumentSymbolsResponse();
    // if (script_response_value.type() == variable::ValueType::Array) {
    //     const std::vector<variable::Value> & array = script_response_value.getArray()->values();
    //     doc_symbols.insert(doc_symbols.end(), array.begin(), array.end());
    // }

    return doc_symbols;
}

/// \brief Builds the hover text for a token: either a "terminal symbol" note
///        or the list of the production's alternatives, one per line.
std::u16string FuzeDocumentOperation::makeHover(const inout::Token & t) const
{
    std::u16string hover = t.lexeme();
    const auto [first, last] = _productions.equal_range(hover);

    if (first == last) {
        // No production carries this name — it is a terminal of the grammar.
        hover += u" - терминальный символ грамматики";
        return hover;
    }

    // One "→ pattern" line per alternative of the production.
    for (auto it = first; it != last; ++it) {
        hover += u"\n→";
        for (const inout::Token & member : it->second.second)
            hover += u" " + member.token();
    }

    return hover;
}

