import axios from 'axios';

/** A single message in an Ollama chat conversation. */
export interface OllamaMessage {
  /** Author of the message. */
  role: 'system' | 'user' | 'assistant';
  /** Plain-text content of the message. */
  content: string;
}

/**
 * Request payload for the Ollama `/api/chat` endpoint.
 *
 * Note: `LocalOllamaClient` overrides `stream` itself (false for
 * `chatCompletion`, true for `chatCompletionStream`), so the `stream`
 * field here is effectively ignored by this client.
 */
export interface OllamaRequest {
  /** Name of the local model to use (as listed by `/api/tags`). */
  model: string;
  /** Conversation history, oldest first. */
  messages: OllamaMessage[];
  stream?: boolean;
  /** Passed through to Ollama's `options` object verbatim. */
  options?: {
    temperature?: number;
    // NOTE(review): Ollama's native option for limiting output length is
    // `num_predict`, not `max_tokens` — verify this key is honored by the server.
    max_tokens?: number;
  };
}

/**
 * A response (or one streamed chunk of a response) from `/api/chat`.
 * When streaming, each chunk carries a partial `message` and the final
 * chunk sets `done` to true.
 */
export interface OllamaResponse {
  /** Model that produced the response. */
  model: string;
  /** Server-side creation timestamp (ISO 8601 string). */
  created_at: string;
  /** The (possibly partial) assistant message. */
  message: {
    role: string;
    content: string;
  };
  /** True on the final chunk / completed response. */
  done: boolean;
}

/**
 * Minimal HTTP client for a locally running Ollama server.
 *
 * Talks to the `/api/chat` (blocking and streaming) and `/api/tags`
 * endpoints via axios. The base URL defaults to Ollama's standard
 * local address.
 */
export class LocalOllamaClient {
  private readonly baseUrl: string;

  constructor(baseUrl: string = 'http://localhost:11434') {
    this.baseUrl = baseUrl;
  }

  /**
   * Sends a non-streaming chat request and returns the complete response.
   *
   * `stream` is always forced to `false` here regardless of `request.stream`;
   * use {@link chatCompletionStream} for incremental output.
   *
   * @param request model, messages, and optional generation options
   * @returns the full chat response from the server
   * @throws Error when the HTTP request fails (server down, bad model, etc.)
   */
  async chatCompletion(request: OllamaRequest): Promise<OllamaResponse> {
    try {
      const response = await axios.post(`${this.baseUrl}/api/chat`, {
        model: request.model,
        messages: request.messages,
        stream: false,
        // ?? instead of ||: an explicitly-passed falsy value should not be
        // clobbered (options is an object here, but keep the idiom consistent).
        options: request.options ?? {}
      });

      return response.data;
    } catch (error) {
      console.error('Ollama API error:', error);
      throw new Error(`Failed to get response from local model: ${error}`);
    }
  }

  /**
   * Sends a streaming chat request and yields each NDJSON chunk as it arrives.
   *
   * Fixes over the naive implementation:
   * - Network chunks can end mid-way through a JSON line, so partial lines are
   *   buffered and carried into the next chunk instead of being silently
   *   dropped by the JSON.parse catch.
   * - Ollama's final chunk carries `done: true` but may omit `message`; it is
   *   now yielded (with an empty message fallback) so consumers can observe
   *   stream completion.
   *
   * @param request model, messages, and optional generation options
   * @throws Error when the HTTP request itself fails
   */
  async *chatCompletionStream(request: OllamaRequest): AsyncGenerator<OllamaResponse, void, unknown> {
    try {
      const response = await axios.post(`${this.baseUrl}/api/chat`, {
        model: request.model,
        messages: request.messages,
        stream: true,
        options: request.options ?? {}
      }, {
        responseType: 'stream'
      });

      const stream = response.data;
      // Carry-over buffer for a line split across network chunk boundaries.
      let pending = '';

      for await (const chunk of stream) {
        pending += chunk.toString();
        const lines = pending.split('\n');
        // The last element is either '' (chunk ended on a newline) or an
        // incomplete line — keep it for the next iteration.
        pending = lines.pop() ?? '';

        for (const line of lines) {
          const parsed = this.parseStreamLine(line);
          if (parsed) {
            yield parsed;
          }
        }
      }

      // Flush any complete JSON object left after the stream ends.
      const tail = this.parseStreamLine(pending);
      if (tail) {
        yield tail;
      }
    } catch (error) {
      console.error('Ollama streaming error:', error);
      throw new Error(`Failed to get streaming response from local model: ${error}`);
    }
  }

  /**
   * Parses one NDJSON line into an OllamaResponse, or returns null for blank
   * lines, invalid JSON, and chunks with neither a message nor a done flag.
   */
  private parseStreamLine(line: string): OllamaResponse | null {
    if (line.trim() === '') {
      return null;
    }
    try {
      const data = JSON.parse(line);
      if (data.message || data.done) {
        return {
          model: data.model,
          created_at: data.created_at,
          // Final done chunk may have no message payload.
          message: data.message ?? { role: 'assistant', content: '' },
          done: data.done
        };
      }
      return null;
    } catch {
      // Skip invalid JSON lines (keeps prior best-effort behavior).
      return null;
    }
  }

  /**
   * Lists the names of locally installed models via `/api/tags`.
   *
   * Best-effort: returns an empty array on any failure rather than throwing
   * (preserves the original contract for callers probing availability).
   */
  async listModels(): Promise<string[]> {
    try {
      const response = await axios.get(`${this.baseUrl}/api/tags`);
      return response.data.models.map((model: any) => model.name);
    } catch (error) {
      console.error('Failed to list models:', error);
      return [];
    }
  }
}
