import 'dart:convert';

import 'package:chat_ai/src/services/request.dart';
import 'package:chat_ai/src/utils/response_parser.dart';
import 'package:http/http.dart' as http;

import 'package:chat_ai/src/models/session_model.dart';
import 'package:chat_ai/src/models/llm_response.dart';

/// Sends [history] as an OpenAI-style chat-completion request to the server
/// described by [sessionModel] and returns the parsed [LlmResponse].
///
/// [history] is the message list placed verbatim under the `messages` key;
/// each element is presumably a `{role, content}` map — confirm with callers.
///
/// [model] is the model identifier sent in the request body. It defaults to
/// the previously hard-coded value, so existing call sites are unaffected.
///
/// Throws an [Exception] containing the HTTP status code and response body
/// when the server replies with anything other than 200.
Future<LlmResponse> getLlmResponse(
  SessionModel sessionModel,
  List history, {
  String model = '/models/Qwen2-7B-Instruct',
}) async {
  // NOTE(review): endpoint is plain http — confirm the server is never TLS.
  final url =
      'http://${sessionModel.host}:${sessionModel.port}/v1/chat/completions';
  final http.Response response = await httpPost(
    url,
    <String, String>{
      'Content-Type': 'application/json; charset=UTF-8',
      'Accept': 'application/json',
    },
    jsonEncode(
      <String, dynamic>{
        'model': model,
        'messages': history,
      },
    ),
  );
  if (response.statusCode == 200) {
    return LlmResponse.fromJson(
      ResponseParser.parseJsonBody(response),
    );
  }
  // Interpolating the Response object itself only prints
  // "Instance of 'Response'"; surface the status and body instead so
  // failures are actually debuggable.
  throw Exception(
    'Failed to create llm response. '
    'status=${response.statusCode} body=${response.body}',
  );
}
