package com.cyx.exercise.ollama;

import java.io.IOException;
import java.util.Scanner;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

/**
 * Ollama API usage example.
 *
 * <p>Demonstrates calling the Ollama HTTP API through the project's
 * {@code OllamaClient} using the JDK's native HTTP client: listing models,
 * synchronous / asynchronous / streaming generation, the chat endpoint,
 * and an interactive multi-turn chat loop.
 */
public class OllamaExample {

    private static final String OLLAMA_BASE_URL = "http://localhost:11434";
    private static final String DEFAULT_MODEL = "qwen3:4b"; // adjust to a model that is actually installed

    public static void main(String[] args) {
        OllamaExample example = new OllamaExample();

        System.out.println("=== Ollama Java原生HTTP客户端示例 ===");
        System.out.println("确保Ollama服务正在运行在: " + OLLAMA_BASE_URL);
        System.out.println();

        try {
            // 1. List available models
            example.testGetModels();

            // 2. Synchronous generation
            example.testSyncGenerate();

            // 3. Asynchronous generation
            example.testAsyncGenerate();

            // 4. Streaming generation
            example.testStreamGenerate();

            // 5. Chat endpoint
            example.testChat();

            // 6. Interactive chat loop
            example.interactiveChat();

        } catch (Exception e) {
            System.err.println("示例执行出错: " + e.getMessage());
            e.printStackTrace();
        }
    }

    /**
     * Lists the models available on the Ollama server.
     * Failures are reported to stderr and do not abort the demo.
     */
    public void testGetModels() {
        System.out.println("1. 测试获取模型列表...");
        try {
            OllamaClient client = new OllamaClient(OLLAMA_BASE_URL);
            String models = client.getModels();
            System.out.println("可用模型: " + models);
            System.out.println();
        } catch (Exception e) {
            System.err.println("获取模型列表失败: " + e.getMessage());
            System.out.println();
        }
    }

    /**
     * Sends one blocking generate request and prints the response,
     * the wall-clock latency, and the completion flag.
     */
    public void testSyncGenerate() {
        System.out.println("2. 测试同步生成...");
        try {
            OllamaClient client = new OllamaClient(OLLAMA_BASE_URL);
            OllamaRequest request = new OllamaRequest(DEFAULT_MODEL, "请用中文简单介绍一下Java编程语言");

            long startTime = System.currentTimeMillis();
            OllamaResponse response = client.generate(request);
            long endTime = System.currentTimeMillis();

            System.out.println("响应: " + response.getResponse());
            System.out.println("耗时: " + (endTime - startTime) + "ms");
            System.out.println("完成状态: " + response.isDone());
            System.out.println();
        } catch (Exception e) {
            System.err.println("同步生成失败: " + e.getMessage());
            System.out.println();
        }
    }

    /**
     * Sends a generate request asynchronously via {@code CompletableFuture}.
     *
     * <p>The success and failure handlers are chained onto the future, and the
     * <em>handled</em> stage is joined: {@code exceptionally} converts a failure
     * into a normal completion, so an error is reported exactly once by the
     * handler instead of also propagating out of {@code join()}.
     */
    public void testAsyncGenerate() {
        System.out.println("3. 测试异步生成...");
        try {
            OllamaClient client = new OllamaClient(OLLAMA_BASE_URL);
            OllamaRequest request = new OllamaRequest(DEFAULT_MODEL, "什么是人工智能？请简要说明。");

            long startTime = System.currentTimeMillis();
            CompletableFuture<OllamaResponse> future = client.generateAsync(request);

            System.out.println("异步请求已发送，等待响应...");

            CompletableFuture<Void> handled = future.thenAccept(response -> {
                long endTime = System.currentTimeMillis();
                System.out.println("异步响应: " + response.getResponse());
                System.out.println("异步耗时: " + (endTime - startTime) + "ms");
                System.out.println("异步完成状态: " + response.isDone());
            }).exceptionally(throwable -> {
                System.err.println("异步生成失败: " + throwable.getMessage());
                return null;
            });

            // Wait for completion; the handled stage never completes exceptionally,
            // so a failure is not reported a second time by the outer catch.
            handled.join();
            System.out.println();
        } catch (Exception e) {
            System.err.println("异步生成失败: " + e.getMessage());
            System.out.println();
        }
    }

    /**
     * Requests a streaming generation and echoes each chunk as it arrives.
     *
     * <p>{@code generateStream} exposes only a chunk callback and an error
     * callback here (no completion callback — TODO: confirm against
     * {@code OllamaClient}), so this waits up to 10 seconds on a latch that
     * the error callback trips; an error therefore ends the wait early
     * instead of sleeping blindly for the full duration.
     */
    public void testStreamGenerate() {
        System.out.println("4. 测试流式生成...");
        try {
            OllamaClient client = new OllamaClient(OLLAMA_BASE_URL);
            OllamaRequest request = new OllamaRequest(DEFAULT_MODEL, "请写一首关于春天的短诗");

            CountDownLatch latch = new CountDownLatch(1);
            StringBuilder fullResponse = new StringBuilder();

            System.out.println("流式响应开始:");
            System.out.print(">>> ");

            client.generateStream(request,
                chunk -> {
                    System.out.print(chunk);
                    fullResponse.append(chunk);
                },
                error -> {
                    System.err.println("\n流式生成出错: " + error.getMessage());
                    latch.countDown();
                }
            );

            // Bounded wait: returns early if the error callback fires,
            // otherwise assumes the stream has finished after the timeout.
            try {
                latch.await(10, TimeUnit.SECONDS);
            } catch (InterruptedException ie) {
                Thread.currentThread().interrupt(); // preserve interrupt status
            }

            System.out.println("\n流式响应完成");
            System.out.println("完整响应: " + fullResponse.toString());
            System.out.println();
        } catch (Exception e) {
            System.err.println("流式生成失败: " + e.getMessage());
            System.out.println();
        }
    }

    /**
     * Exercises the chat endpoint with a system prompt plus one user message
     * and prints the role, content, and completion flag of the reply.
     */
    public void testChat() {
        System.out.println("5. 测试聊天接口...");
        try {
            OllamaClient client = new OllamaClient(OLLAMA_BASE_URL);
            OllamaChatRequest request = new OllamaChatRequest(DEFAULT_MODEL);

            // System prompt steering the assistant's behavior
            request.addSystemMessage("你是一个有用的AI助手，请用中文回答问题。");

            // Single user turn
            request.addUserMessage("请解释一下什么是RESTful API？");

            OllamaChatResponse response = client.chat(request);

            System.out.println("聊天响应:");
            System.out.println("角色: " + response.getMessage().getRole());
            System.out.println("内容: " + response.getMessage().getContent());
            System.out.println("完成状态: " + response.isDone());
            System.out.println();
        } catch (Exception e) {
            System.err.println("聊天接口测试失败: " + e.getMessage());
            System.out.println();
        }
    }

    /**
     * Runs a console chat loop until the user types {@code quit}.
     *
     * <p>The full conversation history is accumulated in a single
     * {@code OllamaChatRequest} so each turn has context. The {@code Scanner}
     * is opened in try-with-resources; closing it also closes
     * {@code System.in}, which is acceptable as this is the demo's last step.
     */
    public void interactiveChat() {
        System.out.println("6. 交互式聊天 (输入 'quit' 退出)...");

        try (Scanner scanner = new Scanner(System.in)) {
            OllamaClient client = new OllamaClient(OLLAMA_BASE_URL);
            OllamaChatRequest chatRequest = new OllamaChatRequest(DEFAULT_MODEL);

            // System prompt steering the assistant's behavior
            chatRequest.addSystemMessage("你是一个有用的AI助手，请用中文简洁地回答问题。");

            while (true) {
                System.out.print("用户: ");
                String userInput = scanner.nextLine().trim();

                if ("quit".equalsIgnoreCase(userInput)) {
                    System.out.println("聊天结束。");
                    break;
                }

                if (userInput.isEmpty()) {
                    continue;
                }

                // Record the user's turn in the conversation history
                chatRequest.addUserMessage(userInput);

                try {
                    // Send the whole history; the reply continues the conversation
                    OllamaChatResponse response = client.chat(chatRequest);
                    String assistantResponse = response.getMessage().getContent();

                    System.out.println("助手: " + assistantResponse);

                    // Append the assistant's reply so the next turn sees it
                    chatRequest.addAssistantMessage(assistantResponse);

                } catch (Exception e) {
                    // NOTE(review): on failure the user message stays in the
                    // history and is re-sent next turn — confirm this is intended.
                    System.err.println("聊天请求失败: " + e.getMessage());
                }

                System.out.println();
            }

        } catch (Exception e) {
            System.err.println("交互式聊天失败: " + e.getMessage());
        }
    }
}
