﻿namespace LLamaSharpTest
{
    internal class Program
    {
        // Default model location; override it by passing a path as the first
        // command-line argument. Change it to your own model path.
        private const string DefaultModelPath = @"F:\工作\qwen1_5-7b-chat-q2_k.gguf";

        /// <summary>
        /// Entry point. Runs the interactive chat session to completion.
        /// </summary>
        /// <remarks>
        /// Main is async and awaits the chat task. The original code invoked the
        /// async method without awaiting it, so the process could exit before the
        /// model finished loading and any exception in the task went unobserved.
        /// </remarks>
        static async Task Main(string[] args)
        {
            await ChatAsync(args.Length > 0 ? args[0] : DefaultModelPath);
            Console.WriteLine("Hello, World!");
        }

        /// <summary>
        /// Loads a GGUF model, seeds the chat history with a system prompt, and
        /// runs an interactive console chat loop until the user types "exit".
        /// </summary>
        /// <param name="modelPath">Path to the GGUF model file to load.</param>
        static async Task ChatAsync(string modelPath)
        {
            try
            {
                var parameters = new LLama.Common.ModelParams(modelPath)
                {
                    ContextSize = 1024, // The longest length of chat as memory.
                    //GpuLayerCount = 5 // How many layers to offload to GPU. Please adjust it according to your GPU memory.
                };
                using var model = LLama.LLamaWeights.LoadFromFile(parameters);
                using var context = model.CreateContext(parameters);
                var executor = new LLama.InteractiveExecutor(context);

                // Add chat histories as prompt to tell AI how to act.
                var chatHistory = new LLama.Common.ChatHistory();
                chatHistory.AddMessage(LLama.Common.AuthorRole.System, "Transcript of a dialog, where the User interacts with an Assistant named Bob. Bob is helpful, kind, honest, good at writing, and never fails to answer the User's requests immediately and with precision.");
                chatHistory.AddMessage(LLama.Common.AuthorRole.User, "Hello, Bob.");
                chatHistory.AddMessage(LLama.Common.AuthorRole.Assistant, "Hello. How may I help you today?");

                LLama.ChatSession session = new(executor, chatHistory);

                var inferenceParams = new LLama.Common.InferenceParams()
                {
                    MaxTokens = 256, // No more than 256 tokens should appear in answer. Remove it if antiprompt is enough for control.
                    AntiPrompts = new List<string> { "User:" } // Stop generation once antiprompts appear.
                };

                Console.ForegroundColor = ConsoleColor.Yellow;
                Console.Write("The chat session has started.\nUser: ");
                Console.ForegroundColor = ConsoleColor.Green;
                string userInput = Console.ReadLine() ?? "";

                while (userInput != "exit")
                {
                    // Stream the response token-by-token as it is generated.
                    await foreach (var text in session.ChatAsync(
                        new LLama.Common.ChatHistory.Message(LLama.Common.AuthorRole.User, userInput),
                        inferenceParams))
                    {
                        Console.ForegroundColor = ConsoleColor.White;
                        Console.Write(text);
                    }
                    Console.ForegroundColor = ConsoleColor.Green;
                    userInput = Console.ReadLine() ?? "";
                }
            }
            catch (Exception ex)
            {
                // Surface model-loading/inference failures to the console instead
                // of letting them vanish as an unobserved task exception.
                Console.WriteLine(ex.ToString());
            }
        }
    }
}
