import { Injectable, HttpException, HttpStatus } from '@nestjs/common';
import axios, { AxiosInstance } from 'axios';

/** A single turn in an Ollama chat conversation. */
export interface OllamaMessage {
  /** Who produced this message. */
  role: 'user' | 'assistant' | 'system';
  /** Plain-text message body. */
  content: string;
}

/** Request payload for Ollama's `POST /api/chat` endpoint. */
export interface OllamaChatRequest {
  /** Name of the model to run (e.g. as listed by `getModels`). */
  model: string;
  /** Full conversation history; Ollama is stateless between calls. */
  messages: OllamaMessage[];
  /** When true the server streams newline-delimited JSON chunks. */
  stream?: boolean;
}

/**
 * A model entry as returned by Ollama's `GET /api/tags`
 * (consumed in `OllamaService.getModels`).
 */
export interface OllamaModel {
  /** Model identifier, e.g. "llama3:8b". */
  name: string;
  /** Last-modified timestamp (ISO-8601 string from the server). */
  modified_at: string;
  /** On-disk size in bytes. */
  size: number;
  /** Content digest of the model blob. */
  digest: string;
  /** Metadata block reported by the server for this model. */
  details: {
    parent_model: string;
    format: string;
    family: string;
    families: string[];
    parameter_size: string;
    quantization_level: string;
  };
}

@Injectable()
export class OllamaService {
  private readonly client: AxiosInstance;
  private readonly baseURL: string;

  constructor() {
    this.baseURL = process.env.OLLAMA_URL || 'http://192.168.50.218:11434';
    this.client = axios.create({
      baseURL: this.baseURL,
      timeout: 300000, // 5 minutes timeout for long responses
      headers: {
        'Content-Type': 'application/json',
      },
    });
  }

  /**
   * 获取所有可用的模型列表
   */
  async getModels(): Promise<OllamaModel[]> {
    try {
      const response = await this.client.get('/api/tags');
      return response.data.models || [];
    } catch (error) {
      console.error('Error fetching models from Ollama:', error);
      throw new HttpException(
        '无法连接到 Ollama 服务，请确保 Ollama 正在运行',
        HttpStatus.SERVICE_UNAVAILABLE,
      );
    }
  }

  /**
   * 发送聊天消息（非流式）
   */
  async chat(
    model: string,
    messages: OllamaMessage[],
    stream: boolean = false,
  ): Promise<string> {
    try {
      const request: OllamaChatRequest = {
        model,
        messages,
        stream: false,
      };

      const response = await this.client.post('/api/chat', request);
      return response.data.message?.content || '';
    } catch (error) {
      console.error('Error calling Ollama chat API:', error);
      if (error.response) {
        throw new HttpException(
          error.response.data?.error || 'Ollama API 调用失败',
          error.response.status || HttpStatus.INTERNAL_SERVER_ERROR,
        );
      }
      throw new HttpException(
        '无法连接到 Ollama 服务',
        HttpStatus.SERVICE_UNAVAILABLE,
      );
    }
  }

  /**
   * 流式发送聊天消息
   */
  async *chatStream(
    model: string,
    messages: OllamaMessage[],
  ): AsyncGenerator<string, void, unknown> {
    try {
      const request: OllamaChatRequest = {
        model,
        messages,
        stream: true,
      };

      const response = await this.client.post('/api/chat', request, {
        responseType: 'stream',
      });

      let buffer = '';
      for await (const chunk of response.data) {
        buffer += chunk.toString();
        const lines = buffer.split('\n');
        buffer = lines.pop() || '';

        for (const line of lines) {
          if (line.trim()) {
            try {
              const data = JSON.parse(line);
              if (data.message?.content) {
                yield data.message.content;
              }
              if (data.done) {
                return;
              }
            } catch (e) {
              // 忽略解析错误
            }
          }
        }
      }
    } catch (error) {
      console.error('Error calling Ollama chat stream API:', error);
      throw error;
    }
  }

  /**
   * 检查 Ollama 服务是否可用
   */
  async healthCheck(): Promise<boolean> {
    try {
      await this.client.get('/api/tags');
      return true;
    } catch (error) {
      return false;
    }
  }
}

