import 'dart:convert';
import 'dart:io'; // 修改导入方式
import 'dart:async' show TimeoutException; // Add this import
import 'package:http/http.dart' as http;
import 'package:logging/logging.dart';

// Exception type kept at top level (outside the service class) so callers can catch it.
/// Error thrown by [OllamaService] for network failures, timeouts, and
/// unexpected HTTP responses.
class OllamaException implements Exception {
  /// Human-readable description of what went wrong.
  final String message;

  /// HTTP status code of the failed request, if one was received.
  final int? statusCode;

  OllamaException(this.message, {this.statusCode});

  // Without this override a thrown instance prints as
  // "Instance of 'OllamaException'", losing the message in logs.
  @override
  String toString() => statusCode == null
      ? 'OllamaException: $message'
      : 'OllamaException: $message (status: $statusCode)';
}

/// Thin HTTP client for a local Ollama server (model listing, pull,
/// blocking generation and streaming generation).
class OllamaService {
  /// Base URL of the Ollama server. Change to point at your own instance.
  final String baseUrl = 'http://localhost:11434';

  /// Model used by the generate calls; change via [setModel].
  String currentModel = 'llama:7b';

  final _logger = Logger('OllamaService');

  /// Per-request timeout. Applying it makes the [TimeoutException]
  /// handling below actually reachable (previously no timeout was set).
  /// Generous because generation on large models can be slow.
  static const Duration _requestTimeout = Duration(seconds: 120);

  /// Fetches the names of locally available models from `/api/tags`.
  ///
  /// Throws [OllamaException] when the server is unreachable; on any other
  /// error it logs and falls back to a default model list (best effort).
  Future<List<String>> getAvailableModels() async {
    try {
      final response = await http
          .get(
            Uri.parse('$baseUrl/api/tags'),
            headers: {'Content-Type': 'application/json'},
          )
          .timeout(_requestTimeout);

      if (response.statusCode == 200) {
        final data = jsonDecode(response.body);
        final models = (data['models'] as List?)
                ?.map((m) => m['name'].toString())
                .toList() ??
            [];
        _logger.info('Available models: $models');
        return models;
      } else {
        _logger.warning('Failed to fetch models: ${response.statusCode}');
        throw Exception('Failed to fetch models: ${response.statusCode}');
      }
    } on SocketException {
      throw OllamaException('网络连接失败');
    } catch (e) {
      _logger.warning('Error fetching models: $e');
      return ['llama:7b']; // Best-effort fallback to the default model.
    }
  }

  /// Selects the model used by subsequent generate calls.
  void setModel(String modelName) {
    currentModel = modelName;
    _logger.info('Model set to: $modelName');
  }

  /// Sends [prompt] to `/api/generate` (non-streaming) and returns the
  /// full response text.
  ///
  /// Rewrites any `<think>...</think>` sections (emitted by reasoning
  /// models) into a readable "thoughts / answer" layout. Throws
  /// [OllamaException] on network errors, timeouts, or unknown failures.
  Future<String> generateResponse(String prompt) async {
    try {
      _logger.info('Sending request to Ollama...');
      _logger.info('Using model: $currentModel');
      _logger.info('Prompt: $prompt');

      final response = await http
          .post(
            Uri.parse('$baseUrl/api/generate'),
            headers: {'Content-Type': 'application/json'},
            body: jsonEncode({
              'model': currentModel,
              'prompt': prompt,
              'stream': false,
              'options': {
                'temperature': 0.7,
                'top_p': 0.9,
                // Ollama's output-length option is 'num_predict';
                // 'max_tokens' is not recognized and was silently ignored.
                'num_predict': 2000,
              }
            }),
          )
          .timeout(_requestTimeout);

      _logger.info('Response status: ${response.statusCode}');
      _logger.info('Response body: ${response.body}');

      if (response.statusCode == 200) {
        final data = jsonDecode(response.body);
        String responseText = (data['response'] ?? 'No response').toString();

        // Turn <think>...</think> blocks into labeled sections.
        final thinkRegex = RegExp(r'<think>(.*?)</think>', dotAll: true);
        responseText = responseText.replaceAllMapped(thinkRegex, (match) {
          final thinkContent = match.group(1) ?? '';
          return '思考过程：\n$thinkContent\n\n回答：';
        });

        return responseText;
      } else if (response.statusCode == 404) {
        return '错误: 模型 $currentModel 未找到，请确保已下载该模型';
      } else {
        throw Exception('请求失败: ${response.statusCode}');
      }
    } on SocketException catch (e) {
      throw OllamaException('网络连接失败: ${e.message}');
    } on TimeoutException {
      throw OllamaException('请求超时');
    } catch (e) {
      throw OllamaException('未知错误: $e');
    }
  }

  /// Pulls [modelName] via `/api/pull`. Returns true on HTTP 200.
  ///
  /// Deliberately no timeout here: pulling a model can legitimately take
  /// many minutes depending on model size and bandwidth.
  Future<bool> downloadModel(String modelName) async {
    try {
      _logger.info('Downloading model: $modelName');
      final response = await http.post(
        Uri.parse('$baseUrl/api/pull'),
        headers: {'Content-Type': 'application/json'},
        body: jsonEncode({'name': modelName}),
      );

      _logger.info('Download response: ${response.statusCode}');
      return response.statusCode == 200;
    } catch (e) {
      _logger.warning('Error downloading model: $e');
      return false;
    }
  }

  /// Streams generation output for [prompt], yielding text fragments as
  /// they arrive from `/api/generate` with `stream: true`.
  ///
  /// Throws [OllamaException] on connection failure or a non-200 status;
  /// malformed JSON lines are logged and skipped.
  Stream<String> generateStreamResponse(String prompt) async* {
    // Own the client so it is closed when the stream completes or is
    // cancelled — the previous version leaked one client per call.
    final client = http.Client();
    try {
      final request = http.Request('POST', Uri.parse('$baseUrl/api/generate'));
      request.headers['Content-Type'] = 'application/json';
      request.body = jsonEncode({
        'model': currentModel,
        'prompt': prompt,
        'stream': true,
        'options': {
          'temperature': 0.7,
          'top_p': 0.9,
        }
      });

      final response = await client.send(request).timeout(_requestTimeout);

      if (response.statusCode != 200) {
        throw OllamaException('请求失败: ${response.statusCode}');
      }

      // LineSplitter buffers partial lines across network chunks, so a
      // JSON object split between two chunks is still parsed whole
      // (splitting each chunk on '\n' would corrupt such objects).
      final lines = response.stream
          .transform(utf8.decoder)
          .transform(const LineSplitter());

      await for (final line in lines) {
        if (line.trim().isEmpty) continue;
        try {
          final data = jsonDecode(line);
          final piece = data['response'] as String?;
          if (piece != null && piece.isNotEmpty) {
            yield piece;
          }
        } catch (e) {
          _logger.warning('Error parsing JSON: $e');
        }
      }
    } on SocketException {
      throw OllamaException('网络连接失败');
    } finally {
      client.close();
    }
  }
}
