﻿using ClassifyApp.Model;
using CommConfig;
using LdtCommonLibraryHttp;
using LdtCommonUtil;
using Microsoft.AspNetCore.JsonPatch.Internal;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Configuration;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using OpenAI.Net;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

namespace ClassifyApp.Webapi.Controllers
{
    /// <summary>
    /// Class MainController. Exposes LLM-backed text classification endpoints.
    /// Implements the <see cref="Microsoft.AspNetCore.Mvc.ControllerBase" />
    /// </summary>
    /// <seealso cref="Microsoft.AspNetCore.Mvc.ControllerBase" />
    [ApiController]
    [Route("api/v1")]
    public class MainController : ControllerBase
    {
        // URL of the downstream LLM classification service, read from the "ClassifyUrl" configuration key.
        private readonly string classifyUrl;
        // HTTP helper used to POST requests to the classification service.
        private readonly IHttpHelper httpHelper;
        // OpenAI-compatible chat service used for streaming completions.
        private readonly IOpenAIService service;

        /// <summary>
        /// Initializes a new instance of the <see cref="MainController"/> class.
        /// </summary>
        /// <param name="httpHelper">The HTTP helper.</param>
        /// <param name="configuration">Application configuration; expected to contain the "ClassifyUrl" key.</param>
        /// <param name="openAIService">OpenAI-compatible streaming chat service.</param>
        public MainController(IHttpHelper httpHelper, IConfiguration configuration, IOpenAIService openAIService)
        {
            this.httpHelper = httpHelper;
            this.classifyUrl = configuration["ClassifyUrl"];
            this.service = openAIService;
        }

        /// <summary>
        /// Generates content for a single text using the prompt configured for the request's model code.
        /// </summary>
        /// <param name="classifyModel">The request; <c>TestText</c> may be plain text or Base64-encoded XML.</param>
        /// <returns>An <see cref="HttpReturnEntity"/> with Code "1" and the generated text as Body.</returns>
        /// <exception cref="ServiceException">Thrown when the model code has no prompt configuration.</exception>
        [HttpPost("GetGenerateContent")]
        public async Task<ActionResult> GetGenerateContent(ClassifyModel classifyModel)
        {
            var prompt = FindPromptOrThrow(classifyModel.ModelCode);
            try
            {
                // Best effort: if the payload is Base64-encoded XML, decode it and take the
                // first extracted section for this model code; otherwise keep the raw text.
                var decoded = LdtCommonUtil.Security.Encrypt.Base64Decrypt(classifyModel.TestText.ToString());
                classifyModel.TestText = XmlDeal.GetXmlContent($"{decoded}", classifyModel.ModelCode)[0];
            }
            catch
            {
                // Deliberately swallowed: input was not Base64/XML, fall through with the original text.
            }
            var answer = await GetLLmContent(prompt, classifyModel.TestText.ToString());
            return Ok(new HttpReturnEntity { Code = "1", Body = answer });
        }

        /// <summary>
        /// Classifies one or more texts ("情形情节" classification); each LLM answer is split into
        /// labels on '|' or ',' and every label is reported with a fixed probability of 1.
        /// </summary>
        /// <param name="classifyModel">The request; <c>TestText</c> is either a JSON array of texts or a single text.</param>
        /// <returns>An anonymous envelope: <c>{ Result = { Code = "1" }, Body = { @class = [...] } }</c>.</returns>
        /// <exception cref="ServiceException">Thrown when the model code has no prompt configuration.</exception>
        [HttpPost("GetQingXingQingJieContent")]
        public async Task<ActionResult> GetQingXingQingJieContent(ClassifyModel classifyModel)
        {
            var prompt = FindPromptOrThrow(classifyModel.ModelCode);

            // TestText may be a JSON array of paragraphs or a single string.
            List<string> contents = classifyModel.TestText is JArray array
                ? array.Select(p => p.ToString()).ToList()
                : new List<string> { classifyModel.TestText.ToString() };

            var results = new List<object>();
            foreach (var item in contents)
            {
                var answer = await GetLLmContent(prompt, item);
                results.Add(answer
                    .Split(new[] { '|', ',' }, StringSplitOptions.RemoveEmptyEntries)
                    .Select(p => new { name = p, probnum = 1 })
                    .ToList());
            }
            return Ok(new
            {
                Result = new { Code = "1" },
                Body = new
                {
                    @class = results
                }
            });
        }

        /// <summary>
        /// Diagnostic endpoint: streams a completion for the request's text and echoes each
        /// delta to the console. Intentionally returns an empty body.
        /// </summary>
        /// <param name="classifyModel">The request; <c>TestText</c> is a comma-separated list substituted into the prompt.</param>
        /// <returns>200 OK with an empty string.</returns>
        /// <exception cref="ServiceException">Thrown when the model code has no prompt configuration.</exception>
        [HttpPost("Test")]
        public async Task<ActionResult> Test(ClassifyModel classifyModel)
        {
            var prompt = FindPromptOrThrow(classifyModel.ModelCode);

            // Sort the comma-separated items so the model always sees them in a deterministic order.
            var content = prompt.Prompt.Replace("{content}", string.Join(",", classifyModel.TestText.ToString().Split(",").OrderBy(p => p)));
            var messages = new List<OpenAI.Net.Message>
            {
                OpenAI.Net.Message.Create(ChatRoleType.System, "你是一位审查查明段落的生成助手"),
                OpenAI.Net.Message.Create(ChatRoleType.User, content),
            };
            await foreach (var chunk in service.Chat.GetStream(messages, o =>
            {
                o.Model = "qwen1half-4b-chat";
                o.Temperature = 0;
                o.MaxTokens = 1024;
                o.FrequencyPenalty = 1.2;
                o.TopP = 0.7;
                o.PresencePenalty = 1.2;
            }))
            {
                Console.Write(chunk?.Result?.Choices[0].Delta?.Content);
            }
            return Ok("");
        }

        /// <summary>
        /// Finds the prompt configuration matching the given model code (case-insensitive).
        /// </summary>
        /// <param name="modelCode">The model code sent by the caller.</param>
        /// <returns>The matching <see cref="ModelPromptConfig"/>.</returns>
        /// <exception cref="ServiceException">Thrown when no prompt configuration matches the model code.</exception>
        private static ModelPromptConfig FindPromptOrThrow(string modelCode)
        {
            var prompt = CommConfig.CommonConfig.PromptConfigs.Find(p => p.ModelCode.Equals(modelCode, StringComparison.OrdinalIgnoreCase));
            if (prompt == null)
            {
                throw new ServiceException("ModelCode不支持");
            }
            return prompt;
        }

        /// <summary>
        /// Calls the downstream LLM HTTP endpoint and returns the first choice's text.
        /// </summary>
        /// <param name="prompt">Prompt configuration. If its template contains "{content}" the content is
        /// substituted in place; otherwise the content is appended on a new line.</param>
        /// <param name="content">The user content to classify.</param>
        /// <returns>The generated text of the first completion choice.</returns>
        /// <exception cref="ServiceException">大模型服务接口失败 — thrown when the HTTP call is not successful.</exception>
        private async Task<string> GetLLmContent(ModelPromptConfig prompt, string content)
        {
            var userContent = prompt.Prompt.Contains("{content}")
                ? prompt.Prompt.Replace("{content}", content)
                : $"{prompt.Prompt}\n{content}";
            var payload = JsonConvert.SerializeObject(new OpenAiRequestModel
            {
                model = "qwen1half-1_8b-chat",
                request_id = LdtCommonUtil.ObjectId.GenerateNewStringId(),
                temperature = 0,
                //top_p = 0.01,
                //top_k = 1,
                messages = new List<MessagesItem>
                {
                    new MessagesItem
                    {
                        role = "system",
                        content = "You are a helpful assistant."
                    },
                    new MessagesItem
                    {
                        role = "user",
                        content = userContent
                    }
                }
            });
            // 600000 ms = 10-minute timeout; LLM generation can be slow.
            var response = await httpHelper.PostAsync(classifyUrl, payload, 600000);
            if (!response.IsSuccessStatusCode)
            {
                throw new ServiceException("大模型服务接口失败");
            }
            var result = JsonConvert.DeserializeObject<OpenResponseModel>(await response.Content.ReadAsStringAsync());
            return result.choices[0].message.content;
        }
    }
}