﻿using System.ClientModel;
using System.Reflection.Metadata;
using System.Text.Json;
using System.Threading.Tasks;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.Configuration;
using OpenAI;
using OpenAI.Chat;
using ChatMessage = Microsoft.Extensions.AI.ChatMessage;

namespace llmChat;

class Program
{
    /// <summary>
    /// Application entry point. Runs the interactive chat loop and reports
    /// any unhandled exception via a non-zero exit code.
    /// </summary>
    /// <param name="args">Command-line arguments (currently unused).</param>
    static async Task Main(string[] args)
    {
        try
        {
            var myChat = new ai_Chat2();
            // Await directly: Chat_Loop is async work, so blocking on it with
            // Task.Run(...).GetAwaiter().GetResult() risks deadlock and wastes
            // a thread-pool thread. C# 7.1+ async Main makes this unnecessary.
            await myChat.Chat_Loop();
        }
        catch (Exception ex)
        {
            // Log the full exception (including stack trace) and signal
            // failure to the calling shell/process.
            Console.WriteLine($"Critical Error: {ex}");
            Environment.ExitCode = 1;
        }
    }
}
