﻿using MultiAgentsClient.ChatDataStructures;
using MultiAgentsClient.Shared;
using OpenAI.ObjectModels.RequestModels;

namespace MultiAgentsServer.Scripts.Services;

/// <summary>
/// Entry point that routes LLM chat requests to the appropriate backing service
/// (Ollama or OpenAI) based on which service's <c>ModelList</c> contains the
/// requested model name.
/// </summary>
public class LlmComEntryPoint
{
    public readonly OllamaComService ollamaCom;
    public readonly OpenAIComService openAiCom;

    public LlmComEntryPoint(OllamaComService ollamaComService, OpenAIComService openAiComService)
    {
        ollamaCom = ollamaComService;
        openAiCom = openAiComService;
    }

    /// <summary>
    /// Streams chat completion slices for <paramref name="requestedModel"/>,
    /// delegating to whichever service advertises that model.
    /// </summary>
    /// <param name="messages">Conversation history to send to the model.</param>
    /// <param name="requestedModel">Model identifier used to select the backing service.</param>
    /// <param name="temperature">Sampling temperature forwarded to the service.</param>
    /// <param name="toolSet">Optional tool definitions forwarded to the service.</param>
    /// <returns>An async stream of response slices; elements may be null as produced by the backing service.</returns>
    /// <exception cref="NotSupportedException">Neither service supports <paramref name="requestedModel"/>.</exception>
    public async IAsyncEnumerable<LlmStreamSlice?> RequestChatAsStream(List<ChatMessageData> messages, string requestedModel, float temperature, List<ToolDefinition>? toolSet = null)
    {
        if (ollamaCom.ModelList.Contains(requestedModel))
        {
            await foreach (var slice in ollamaCom.RequestChatAsStream(messages, requestedModel, temperature, toolSet))
            {
                yield return slice;
            }
        }
        else if (openAiCom.ModelList.Contains(requestedModel))
        {
            await foreach (var slice in openAiCom.RequestChatAsStream(messages, requestedModel, temperature, toolSet))
            {
                yield return slice;
            }
        }
        else
        {
            // NotSupportedException is more specific than a bare Exception and
            // remains catchable by existing catch (Exception) handlers.
            throw new NotSupportedException($"Model {requestedModel} not supported");
        }
    }

    /// <summary>
    /// Performs a single (non-streaming) chat request against whichever service
    /// advertises <paramref name="requestedModel"/>.
    /// </summary>
    /// <param name="messages">Conversation history to send to the model.</param>
    /// <param name="requestedModel">Model identifier used to select the backing service.</param>
    /// <param name="temperature">Sampling temperature forwarded to the service.</param>
    /// <param name="toolSet">Optional tool definitions forwarded to the service.</param>
    /// <returns>The completed response slice, or null if the backing service returned none.</returns>
    /// <exception cref="NotSupportedException">Neither service supports <paramref name="requestedModel"/>.</exception>
    public async Task<LlmStreamSlice?> RequestChat(List<ChatMessageData> messages, string requestedModel, float temperature, List<ToolDefinition>? toolSet = null)
    {
        if (ollamaCom.ModelList.Contains(requestedModel))
        {
            return await ollamaCom.RequestChat(messages, requestedModel, temperature, toolSet);
        }

        if (openAiCom.ModelList.Contains(requestedModel))
        {
            return await openAiCom.RequestChat(messages, requestedModel, temperature, toolSet);
        }

        throw new NotSupportedException($"Model {requestedModel} not supported");
    }

    /// <summary>
    /// Performs a single chat request using the simplified message format,
    /// routed to whichever service advertises <paramref name="model"/>.
    /// </summary>
    /// <param name="msgsSimp">Simplified conversation messages to send.</param>
    /// <param name="model">Model identifier used to select the backing service.</param>
    /// <param name="temp">Sampling temperature; defaults to 1.0.</param>
    /// <returns>The completed response slice, or null if the backing service returned none.</returns>
    /// <exception cref="NotSupportedException">Neither service supports <paramref name="model"/>.</exception>
    public async Task<LlmStreamSlice?> RequestChatSimp(List<LlmMessageSimp> msgsSimp, string model, float temp = 1.0f)
    {
        if (ollamaCom.ModelList.Contains(model))
        {
            return await ollamaCom.RequestChatSimp(msgsSimp, model, temp);
        }

        if (openAiCom.ModelList.Contains(model))
        {
            return await openAiCom.RequestChatSimp(msgsSimp, model, temp);
        }

        throw new NotSupportedException($"Model {model} not supported");
    }
}