#include <algorithm>
#include <cctype>
#include <chrono>
#include <cstdio>
#include <cstdlib>
#include <fstream>
#include <functional>
#include <iostream>
#include <limits>
#include <sstream>
#include <string>
#include <thread>
#include <vector>
#ifdef _WIN32
#include <windows.h>
#include <shlobj.h>
#else
#include <unistd.h>
#include <sys/wait.h>
#include <pwd.h>
#endif
// NOTE(review): <bits/stdc++.h> is a non-standard GCC-internal header and
// will not compile under MSVC; prefer removing it once all needed standard
// headers are listed explicitly above.
#include <bits/stdc++.h>
using namespace std;
using namespace std::chrono;

// One entry of the model menu presented to the user.
struct ModelInfo {
    string name;            // Ollama model tag, e.g. "deepseek-r1:7b"
    string size;            // approximate download size shown in the menu
    string recommendation;  // "推荐" (recommended) or "不推荐" (not recommended)
};

// Menu of DeepSeek-R1 variants offered for installation. The larger
// models are marked "不推荐" (not recommended), presumably because of
// their disk/VRAM footprint.
vector<ModelInfo> MODELS = {
    {"deepseek-r1:1.5b", "1.1GB", "推荐"},
    {"deepseek-r1:7b", "4.7GB", "推荐"},
    {"deepseek-r1:8b", "4.9GB", "推荐"},
    {"deepseek-r1:14b", "9GB", "推荐"},
    {"deepseek-r1:32b", "20GB", "不推荐"},
    {"deepseek-r1:70b", "43GB", "不推荐"},
    {"deepseek-r1:671b", "404GB", "不推荐"},
};

/**
 * Runs a shell command and optionally captures its stdout.
 *
 * @param cmd    Command line handed to the platform shell via popen/_popen.
 * @param output If non-null, cleared and then filled with the command's
 *               standard output.
 * @return true iff the command could be started and exited with status 0.
 */
bool executeCommand(const std::string& cmd, std::string* output = nullptr) {
#ifdef _WIN32
    FILE* pipe = _popen(cmd.c_str(), "r");
#else
    FILE* pipe = popen(cmd.c_str(), "r");
#endif
    if (!pipe) return false;

    char buffer[128];
    if (output) output->clear();
    // BUG FIX: the original wrote `fgets(buffer, sizeof(buffer)), pipe`,
    // placing the stream argument outside the call via a stray comma, so
    // the read loop never worked.
    while (fgets(buffer, sizeof(buffer), pipe) != nullptr) {
        if (output) *output += buffer;
    }

#ifdef _WIN32
    // BUG FIX: _pclose returns the command's exit status directly; the
    // POSIX WIFEXITED/WEXITSTATUS macros do not exist on Windows.
    int status = _pclose(pipe);
    return status == 0;
#else
    int status = pclose(pipe);
    return WIFEXITED(status) && WEXITSTATUS(status) == 0;
#endif
}

// Probes for the `ollama` CLI by asking it for its version; true means
// the binary is on PATH and runnable.
bool checkOllamaInstalled() {
    const bool cliAvailable = executeCommand("ollama --version");
    return cliAvailable;
}

// Returns the platform temp directory, trailing separator included.
// Falls back to "C:\Temp\" if the Windows API call fails.
std::string getTempPath() {
#ifdef _WIN32
    char tempDir[MAX_PATH];
    if (GetTempPathA(MAX_PATH, tempDir) != 0) {
        return std::string(tempDir);
    }
    return "C:\\Temp\\";
#else
    return "/tmp/";
#endif
}

// Downloads the platform-appropriate Ollama installer into the temp
// directory and returns its full path.
// Exits the process on an unsupported OS or on download failure.
string downloadOllama() {
    string osName;
#ifdef _WIN32
    osName = "Windows";
#elif __APPLE__
    osName = "Darwin";
#else
    osName = "Linux";
#endif

    string url, filename, path;
    
    if (osName == "Windows") {
        // NOTE(review): ghproxy.cfd is used as a GitHub download proxy;
        // the single slash after "https:" in the embedded URL looks odd
        // but may be what the proxy expects — confirm before changing.
        url = "https://ghproxy.cfd/https:/github.com/ollama/ollama/releases/download/v0.6.8/OllamaSetup.exe";
        filename = "OllamaSetup.exe";
    } else if (osName == "Linux") {
        url = "https://ollama.com/install.sh";
        filename = "install.sh";
    } else if (osName == "Darwin") {
        url = "https://github.moeyy.xyz/https://github.com/ollama/ollama/releases/download/v0.5.7/Ollama-darwin.zip";
        // NOTE(review): the URL points at a .zip, but the saved filename
        // has no .zip extension and installOllama() later executes this
        // file directly — the macOS path probably needs an unzip step.
        filename = "Ollama-darwin";
    } else {
        cerr << "不支持的操作系统" << endl;  // "unsupported operating system"
        exit(1);
    }

    path = getTempPath() + filename;
    cout << "正在下载Ollama安装包..." << endl;  // "downloading Ollama installer..."

    // Both branches shell out to curl; only the flag spelling differs.
    string downloadCmd;
    if (osName == "Windows") {
        downloadCmd = "curl -Lo \"" + path + "\" \"" + url + "\"";
    } else {
        downloadCmd = "curl -fsSL \"" + url + "\" -o \"" + path + "\"";
    }

    if (!executeCommand(downloadCmd)) {
        cerr << "下载失败，请检查网络连接" << endl;  // "download failed; check the network"
        exit(1);
    }

    return path;
}

// Downloads and runs the Ollama installer, then waits a few seconds and
// re-probes the CLI.
// Returns true iff `ollama --version` succeeds afterwards.
bool installOllama() {
    string path = downloadOllama();
    cout << "正在安装Ollama..." << endl;  // "installing Ollama..."

    string installCmd;
#ifdef _WIN32
    // /S presumably requests a silent install — TODO confirm this is the
    // flag OllamaSetup.exe actually honors.
    installCmd = "\"" + path + "\" /S";
#elif __APPLE__
    // NOTE(review): on macOS the downloaded artifact is a zip archive
    // (see downloadOllama), yet it is chmod'ed and executed directly —
    // this likely fails; an unzip step is probably required. Verify.
    installCmd = "chmod +x \"" + path + "\" && sudo \"" + path + "\"";
#else
    // Linux: run the downloaded install.sh with root privileges.
    installCmd = "chmod +x \"" + path + "\" && sudo bash \"" + path + "\"";
#endif

    if (!executeCommand(installCmd)) {
        cerr << "安装失败" << endl;  // "installation failed"
        return false;
    }

    // Give the installer a moment to finish putting `ollama` on PATH
    // before probing for it.
    this_thread::sleep_for(seconds(5));
    return checkOllamaInstalled();
}

// Prints the numbered model menu: name, download size, recommendation.
void printModels() {
    std::cout << "可用模型列表：" << std::endl;
    // FIX: size_t index — the original compared a signed `int i` against
    // the unsigned MODELS.size() (signed/unsigned mismatch).
    for (std::size_t i = 0; i < MODELS.size(); ++i) {
        std::cout << (i + 1) << ". "
                  << MODELS[i].name
                  << "\t" << MODELS[i].size
                  << "\t" << MODELS[i].recommendation << std::endl;
    }
}

// Prompts until the user enters a valid 1-based model number and returns
// the corresponding 0-based index into MODELS.
// Exits the process if stdin reaches EOF (the original cleared the stream
// and re-prompted forever when input was closed, e.g. piped input).
int getModelChoice() {
    const int maxChoice = static_cast<int>(MODELS.size());
    int choice = 0;
    while (true) {
        std::cout << "请选择要安装的模型编号 (1-" << maxChoice << "): ";
        std::cin >> choice;

        if (std::cin.eof()) {
            // BUG FIX: without this, EOF made the loop spin indefinitely.
            std::cerr << "输入已结束，退出" << std::endl;
            exit(1);
        }
        if (std::cin.fail()) {
            std::cin.clear();
            std::cin.ignore(std::numeric_limits<std::streamsize>::max(), '\n');
            std::cout << "请输入有效的数字" << std::endl;
        } else if (choice < 1 || choice > maxChoice) {
            // maxChoice is a signed copy of MODELS.size(), avoiding the
            // original's signed/unsigned comparison.
            std::cout << "请输入1-" << maxChoice << "之间的数字" << std::endl;
        } else {
            break;
        }
    }
    return choice - 1;
}

// Reports whether `ollama list` already shows the given model name.
// A failure to run the listing command counts as "not installed".
bool isModelInstalled(const std::string& modelName) {
    std::string listing;
    const bool listed = executeCommand("ollama list", &listing);
    return listed && listing.find(modelName) != std::string::npos;
}

// Downloads a model, retrying up to `retries` times.
// Returns true as soon as one attempt succeeds.
bool installModel(const std::string& modelName, int retries = 2) {
    for (int attempt = 0; attempt < retries; ++attempt) {
        cout << "正在安装模型 " << modelName
             << " (尝试 " << (attempt+1) << "/" << retries << ")... 请耐心等待" << endl;

        // BUG FIX: the original used `ollama run`, which, after pulling
        // the model, starts an interactive generate session and blocks;
        // `ollama pull` only downloads the model, which is what an
        // installer wants.
        std::string command = "ollama pull " + modelName;
        if (executeCommand(command)) {
            return true;
        }
    }
    return false;
}

int main() {
    // 检查Ollama安装
    if (!checkOllamaInstalled()) {
        cout << "Ollama未安装，开始安装..." << endl;
        while (!installOllama()) {
            cout << "正在重试安装..." << endl;
        }
        cout << "Ollama安装成功!" << endl;
    }

    // 打印模型列表
    printModels();

    // 获取用户选择
    int choice = getModelChoice();
    ModelInfo selected = MODELS[choice];

    // 检查模型是否已安装
    if (isModelInstalled(selected.name)) {
        cout << "模型 " << selected.name << " 已经安装" << endl;
        return 0;
    }

    // 安装模型
    if (installModel(selected.name)) {
        cout << "模型 " << selected.name << " 安装成功!" << endl;
    } else {
        cerr << "模型 " << selected.name << " 安装失败，请检查网络或磁盘空间" << endl;
    }

    return 0;
}