﻿using Ollama;
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading.Tasks;

namespace OllamaApi
{
    internal class Program
    {
        /// <summary>
        /// Runs an interactive console chat session against an Ollama server.
        /// Each loop reads one user line, sends the full conversation history to the
        /// model, and streams the reply to the console. Type "exit" (or close stdin)
        /// to end the session.
        /// </summary>
        /// <param name="modelName">Name of the model to chat with (must match a model available on the server).</param>
        /// <param name="stream">Whether to stream the response chunk-by-chunk.</param>
        /// <param name="http">Base URI of the Ollama API server.</param>
        private static async Task ChatFunctionAsync(string modelName, bool stream = true, string http = "http://127.0.0.1:11434/api")
        {
            OllamaApiClient ollama = new OllamaApiClient(baseUri: new Uri(http));
            Console.WriteLine("开始对话！！！");
            List<Message> messages = new List<Message>();
            while (true)
            {
                Console.WriteLine("User:");
                string userInput = Console.ReadLine();
                // ReadLine returns null at end of input (e.g. redirected stdin or Ctrl+Z);
                // treat that the same as typing "exit" instead of sending a null message.
                if (userInput is null || userInput == "exit")
                {
                    break;
                }
                messages.Add(new Message(MessageRole.User, userInput, null, null));
                var enumerable = ollama.Chat.GenerateChatCompletionAsync(modelName, messages, stream: stream);
                Console.WriteLine("Agent:");
                StringBuilder builder = new StringBuilder();
                await foreach (GenerateChatCompletionResponse response in enumerable)
                {
                    string content = response.Message.Content;
                    // Append, not AppendLine: each streamed response carries a fragment of
                    // one logical message; inserting a newline per chunk would corrupt the
                    // assistant text stored in the history (and diverge from what was shown).
                    builder.Append(content);
                    Console.Write(content);
                }
                // Record the assistant's complete reply so the model sees prior turns.
                messages.Add(new Message(MessageRole.Assistant, builder.ToString(), null, null));
                Console.WriteLine();
            }
            Console.WriteLine("对话结束！！！");
        }

        /// <summary>
        /// Entry point: starts a console chat session with the configured model.
        /// Uses an async Main (C# 7.1+) so the chat task is awaited directly instead
        /// of blocking on <c>Task.Wait()</c>, which would wrap any failure in an
        /// <see cref="AggregateException"/>.
        /// </summary>
        static async Task Main(string[] args)
        {
            // The model name must match the one created/pulled in Ollama.
            await ChatFunctionAsync("deepseek-r1-llama-8b");
        }
    }
}