﻿// Gateway.Api 项目的版权、商标、专利和其他相关权利均受相应法律法规的保护。使用本项目应遵守相关法律法规和许可证的要求。
// 
// 本项目主要遵循 MIT 许可证和 Apache 许可证（版本 2.0）进行分发和使用。许可证位于源代码树根目录中的 LICENSE-MIT 和 LICENSE-APACHE 文件。
// 
// 不得利用本项目从事危害国家安全、扰乱社会秩序、侵犯他人合法权益等法律法规禁止的活动！任何基于本项目二次开发而产生的一切法律纠纷和责任，我们不承担任何责任！

using Aurora.AI.Common.Models;
using Aurora.AI.Common.Options;
using Aurora.AI.Wiki.Services;
using Aurora.Redis;
using Microsoft.KernelMemory;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Plugins.Core;

namespace Aurora.AI.Wiki;
/// <summary>
/// Factory for Semantic Kernel and Kernel Memory instances used by the wiki module.
/// All builders route OpenAI traffic through <c>OpenAiHttpClientHandler</c> so the
/// endpoint/proxy configured in <see cref="AICommonSetting"/> is honored.
/// </summary>
public sealed class WikiMemoryService
{
    /// <summary>
    /// Creates a function-calling <see cref="Kernel"/>, taking the API key from
    /// <paramref name="setting"/>. Additional functions are expected to be registered later.
    /// </summary>
    /// <param name="chatModel">Chat-completion model id.</param>
    /// <param name="setting">Shared AI settings supplying the API token and HTTP handler configuration.</param>
    /// <returns>A built <see cref="Kernel"/> with the conversation-summary plugin registered.</returns>
    public Kernel CreateFunctionKernel(string chatModel, AICommonSetting setting)
    {
        return CreateFunctionKernel(setting.Token, chatModel, setting);
    }

    /// <summary>
    /// Creates a function-calling <see cref="Kernel"/> configured for OpenAI chat completion.
    /// </summary>
    /// <param name="apiKey">OpenAI-compatible API key.</param>
    /// <param name="modelId">Chat-completion model id.</param>
    /// <param name="setting">Shared AI settings used to build the request handler (endpoint rewriting).</param>
    /// <returns>A built <see cref="Kernel"/> with the conversation-summary plugin registered.</returns>
    public Kernel CreateFunctionKernel(string apiKey, string modelId, AICommonSetting setting)
    {
        var builder = Kernel.CreateBuilder()
           .AddOpenAIChatCompletion(modelId, apiKey,
               httpClient: CreateOpenAiHttpClient(setting));
#pragma warning disable SKEXP0050 // Evaluation-only API; may change or be removed in a future update.
        builder.Plugins.AddFromType<ConversationSummaryPlugin>();
#pragma warning restore SKEXP0050
        var kernel = builder.Build();
        return kernel;
    }

    /// <summary>
    /// Creates the knowledge-base memory service used for ingestion and vector search.
    /// </summary>
    /// <param name="searchClientConfig">Search-client configuration for Kernel Memory.</param>
    /// <param name="maxTokensPerLine">Text partitioning: maximum tokens per line.</param>
    /// <param name="maxTokensPerParagraph">Text partitioning: maximum tokens per paragraph.</param>
    /// <param name="overlappingTokens">Text partitioning: overlap between adjacent chunks.</param>
    /// <param name="chatModel">Text-generation model id (may be null).</param>
    /// <param name="embeddingModel">Embedding model id (may be null).</param>
    /// <param name="setting">Shared AI settings supplying the API token and HTTP handler configuration.</param>
    /// <param name="redisCache">Redis cache instance exposed to Kernel Memory's service collection.</param>
    /// <param name="wikiSettingService">Wiki settings service exposed to Kernel Memory's service collection.</param>
    /// <returns>A built <see cref="MemoryServerless"/> instance.</returns>
    public MemoryServerless CreateMemoryServerless(SearchClientConfig searchClientConfig,
        int maxTokensPerLine,
        int maxTokensPerParagraph,
        int overlappingTokens,
        string? chatModel, string? embeddingModel, AICommonSetting setting, IRedisCache redisCache, WikiSettingService wikiSettingService)
    {
        var memory = new KernelMemoryBuilder()
            .WithQdrantMemoryDb(CreateQdrantConfig())
            .WithSearchClientConfig(searchClientConfig)
            .WithCustomTextPartitioningOptions(new Microsoft.KernelMemory.Configuration.TextPartitioningOptions
            {
                MaxTokensPerLine = maxTokensPerLine,
                MaxTokensPerParagraph = maxTokensPerParagraph,
                OverlappingTokens = overlappingTokens
            })
            .WithOpenAITextGeneration(new OpenAIConfig
            {
                APIKey = setting.Token,
                TextModel = chatModel
            }, null, CreateOpenAiHttpClient(setting))
            .WithOpenAITextEmbeddingGeneration(new OpenAIConfig
            {
                APIKey = setting.Token,
                EmbeddingModel = embeddingModel,
            }, null, false, CreateOpenAiHttpClient(setting))
            .AddSingleton(new WikiMemoryService())
            .AddSingleton<IRedisCache>(redisCache)
            .AddSingleton(wikiSettingService)
            .Build<MemoryServerless>();
        return memory;
    }

    /// <summary>
    /// Creates a memory service intended for management operations only (do not use for vector search).
    /// </summary>
    /// <param name="embeddingModel">Embedding model id.</param>
    /// <param name="model">Text-generation model id (may be null).</param>
    /// <param name="setting">Shared AI settings supplying the API token and HTTP handler configuration.</param>
    /// <returns>A built <see cref="MemoryServerless"/> instance.</returns>
    public MemoryServerless CreateMemoryServerless(string embeddingModel, string? model, AICommonSetting setting)
    {
        return new KernelMemoryBuilder()
            .WithQdrantMemoryDb(CreateQdrantConfig())
            .WithOpenAITextGeneration(new OpenAIConfig
            {
                APIKey = setting.Token,
                TextModel = model
            }, null, CreateOpenAiHttpClient(setting))
            .WithOpenAITextEmbeddingGeneration(new OpenAIConfig
            {
                // NOTE(review): an earlier comment claimed "falls back to ChatToken when
                // EmbeddingToken is empty", but this code always uses setting.Token —
                // confirm whether a separate embedding token was intended here.
                APIKey = setting.Token,
                EmbeddingModel = embeddingModel
            }, null, false, CreateOpenAiHttpClient(setting))
            .Build<MemoryServerless>();
    }

    /// <summary>
    /// Builds an <see cref="HttpClient"/> routed through the OpenAI handler for the given settings.
    /// NOTE(review): a new client is created per call (per-request HttpClient is a known
    /// socket-exhaustion risk); the handler depends on <paramref name="setting"/>, so instances
    /// cannot be trivially shared — consider IHttpClientFactory with named handlers if this
    /// becomes a problem under load.
    /// </summary>
    /// <param name="setting">Shared AI settings consumed by <c>OpenAiHttpClientHandler</c>.</param>
    private static HttpClient CreateOpenAiHttpClient(AICommonSetting setting)
        => new(new OpenAiHttpClientHandler(setting));

    /// <summary>
    /// Builds the Qdrant vector-store configuration from the wiki-wide options.
    /// Centralized here so the endpoint/key pair stays consistent across all builders.
    /// </summary>
    private static QdrantConfig CreateQdrantConfig() => new()
    {
        APIKey = OpenAIOption.WikiApiKey,
        Endpoint = OpenAIOption.WikiConnection,
    };
}