
import sys
import platform
import psutil
import webbrowser
import os
import requests
import subprocess
from PyQt5.QtWidgets import (QApplication, QMainWindow, QWidget, QVBoxLayout, 
                            QComboBox, QPushButton, QLabel, QTextEdit, QGroupBox,
                            QHBoxLayout, QMessageBox, QProgressBar)
from PyQt5.QtCore import Qt, QThread, pyqtSignal

class DownloadThread(QThread):
    """Background worker that streams a file download and reports progress.

    Signals:
        progress(int): percent complete, 0-100 (only emitted when the server
            supplies a Content-Length header).
        finished(): emitted once the file has been written successfully.
        error(str): emitted with a human-readable message on any failure.
    """
    progress = pyqtSignal(int)
    finished = pyqtSignal()
    error = pyqtSignal(str)

    def __init__(self, url, save_path):
        super().__init__()
        self.url = url
        self.save_path = save_path

    def run(self):
        try:
            # Stream the body so large files are not buffered in memory;
            # a timeout prevents the thread from hanging forever.
            response = requests.get(self.url, stream=True, timeout=30)
            # Fail fast on HTTP errors instead of saving an error page to disk.
            response.raise_for_status()
            total_size = int(response.headers.get('content-length', 0))
            block_size = 1024
            downloaded = 0

            with open(self.save_path, 'wb') as f:
                for data in response.iter_content(block_size):
                    downloaded += len(data)
                    f.write(data)
                    # Some servers omit Content-Length; skip the percentage
                    # update in that case to avoid ZeroDivisionError.
                    if total_size > 0:
                        self.progress.emit(int((downloaded / total_size) * 100))

            self.finished.emit()
        except Exception as e:
            # Report any failure back to the GUI thread via the signal.
            self.error.emit(str(e))

class OllamaInstaller(QMainWindow):
    """Main window that guides the user through installing the Ollama
    service, a Deepseek model, and the ChatBox chat client, with
    hardware-based recommendations."""

    def __init__(self):
        super().__init__()
        self.setWindowTitle('Ollama 安装助手')
        self.setGeometry(100, 100, 1000, 800)

        # Central widget and root layout
        central_widget = QWidget()
        self.setCentralWidget(central_widget)
        main_layout = QVBoxLayout(central_widget)

        # System information display (read-only summary of host hardware)
        self.system_info = QTextEdit()
        self.system_info.setReadOnly(True)
        self.system_info.setMaximumHeight(150)
        main_layout.addWidget(QLabel('系统配置信息：'))
        main_layout.addWidget(self.system_info)

        # Horizontal row holding the two selection groups
        selection_layout = QHBoxLayout()

        # Ollama version selection group
        ollama_group = QGroupBox('Ollama 版本选择')
        ollama_layout = QVBoxLayout()

        self.ollama_combo = QComboBox()
        self.ollama_versions = [
            'Ollama 0.1.14 (推荐)',
            'Ollama 0.1.13',
            'Ollama 0.1.12'
        ]
        self.ollama_combo.addItems(self.ollama_versions)
        ollama_layout.addWidget(QLabel('选择Ollama版本：'))
        ollama_layout.addWidget(self.ollama_combo)

        self.ollama_info = QTextEdit()
        self.ollama_info.setReadOnly(True)
        self.ollama_info.setMaximumHeight(100)
        ollama_layout.addWidget(QLabel('版本说明：'))
        ollama_layout.addWidget(self.ollama_info)

        ollama_group.setLayout(ollama_layout)
        selection_layout.addWidget(ollama_group)

        # Deepseek model selection group
        deepseek_group = QGroupBox('Deepseek 模型选择')
        deepseek_layout = QVBoxLayout()

        self.model_combo = QComboBox()
        # Display text carries a human-readable suffix; strip it with
        # _extract_model_name() before passing the name to `ollama`.
        self.models = [
            'deepseek-coder-33b-instruct (高性能)',
            'deepseek-coder-6.7b-instruct (推荐)',
            'deepseek-coder-1.3b-instruct (轻量)'
        ]
        self.model_combo.addItems(self.models)
        deepseek_layout.addWidget(QLabel('选择Deepseek模型：'))
        deepseek_layout.addWidget(self.model_combo)

        self.model_info = QTextEdit()
        self.model_info.setReadOnly(True)
        self.model_info.setMaximumHeight(100)
        deepseek_layout.addWidget(QLabel('模型说明：'))
        deepseek_layout.addWidget(self.model_info)

        deepseek_group.setLayout(deepseek_layout)
        selection_layout.addWidget(deepseek_group)

        main_layout.addLayout(selection_layout)

        # ChatBox installation group
        ChatBox_group = QGroupBox('ChatBox 聊天工具')
        ChatBox_layout = QVBoxLayout()

        ChatBox_info = QTextEdit()
        ChatBox_info.setReadOnly(True)
        ChatBox_info.setMaximumHeight(100)
        ChatBox_info.setText("""
ChatBox是一个开源的AI聊天工具，支持：
• Ollama和Deepseek模型
• 代码高亮显示
• 历史消息保存
• 快捷键操作
• 自定义提示词
• 多会话管理
""")
        ChatBox_layout.addWidget(QLabel('ChatBox说明：'))
        ChatBox_layout.addWidget(ChatBox_info)

        # Download progress bar (hidden until a download starts)
        self.progress_bar = QProgressBar()
        self.progress_bar.setVisible(False)
        ChatBox_layout.addWidget(self.progress_bar)

        ChatBox_group.setLayout(ChatBox_layout)
        main_layout.addWidget(ChatBox_group)

        # Recommended-version banner
        self.suggestion_label = QLabel()
        self.suggestion_label.setStyleSheet("QLabel { color: blue; font-weight: bold; }")
        main_layout.addWidget(self.suggestion_label)

        # Install button (single entry point for the whole pipeline)
        self.install_button = QPushButton('一键安装Ollama和模型及ChatBox')
        self.install_button.setStyleSheet("QPushButton { font-size: 14pt; padding: 10px; }")
        self.install_button.clicked.connect(self.start_installation)
        main_layout.addWidget(self.install_button)

        # Populate the informational panels
        self.update_system_info()
        self.update_suggestion()
        self.model_combo.currentIndexChanged.connect(self.update_model_info)
        self.ollama_combo.currentIndexChanged.connect(self.update_ollama_info)
        self.update_model_info()
        self.update_ollama_info()

        # Pre-select defaults appropriate for this machine
        self.set_recommended_defaults()

    @staticmethod
    def _extract_model_name(display_text):
        """Strip the ' (…)' display annotation from a combo-box entry.

        E.g. 'deepseek-coder-6.7b-instruct (推荐)' ->
        'deepseek-coder-6.7b-instruct', which is what `ollama pull` expects.
        """
        return display_text.split(' (')[0]

    def start_installation(self):
        """Run the full installation pipeline: Ollama service, AI model,
        then the ChatBox client. Each step reports success/failure via
        message boxes and aborts the pipeline on error."""
        try:
            # BUG FIX: the combo text includes a display suffix such as
            # ' (推荐)'; `ollama pull` needs the bare model name.
            selected_model = self._extract_model_name(self.model_combo.currentText())

            # Check whether the Ollama CLI is already on PATH
            ollama_installed = self.check_ollama_installed()

            # 1. Install Ollama (if not already installed)
            if not ollama_installed:
                QMessageBox.information(self, '提示', '开始安装 Ollama 服务...')
                if platform.system() == 'Windows':
                    self.install_ollama_windows()
                elif platform.system() == 'Linux':
                    install_cmd = "curl -fsSL https://gitee.com/mirrors/ollama/raw/main/install.sh | sh"
                    subprocess.run(install_cmd, shell=True, check=True)
                elif platform.system() == 'Darwin':  # macOS
                    if not self.check_homebrew():
                        self.install_homebrew()
                    subprocess.run(['brew', 'install', 'ollama'], check=True)
                QMessageBox.information(self, '成功', 'Ollama服务安装成功！')

            # 2. Pull the selected model (skip if already present)
            if not self.check_model_installed(selected_model):
                QMessageBox.information(self, '提示', f'开始下载AI模型 {selected_model}...')
                try:
                    subprocess.run(['ollama', 'pull', selected_model], check=True)
                    QMessageBox.information(self, '成功', f'AI模型 {selected_model} 下载成功！')
                except subprocess.CalledProcessError as e:
                    QMessageBox.critical(self, '错误', f'AI模型下载失败：{str(e)}')
                    return

            # 3. Download the ChatBox client for the current platform
            QMessageBox.information(self, '提示', '开始下载ChatBox聊天界面...')
            try:
                if platform.system() == 'Linux':
                    # Linux AppImage build
                    subprocess.run(['wget', 'https://mirror.ghproxy.com/https://github.com/Bin-Huang/chatbox/releases/latest/download/Chatbox.AppImage'], check=True)
                    subprocess.run(['chmod', '+x', 'Chatbox.AppImage'], check=True)
                    QMessageBox.information(self, '成功', 'ChatBox已下载完成！\n请手动运行Chatbox.AppImage启动程序')
                elif platform.system() == 'Windows':
                    # Windows build, fetched via PowerShell
                    download_cmd = "Invoke-WebRequest -Uri 'https://mirror.ghproxy.com/https://github.com/Bin-Huang/chatbox/releases/latest/download/Chatbox_windows_x86_64.exe' -OutFile 'Chatbox.exe'"
                    subprocess.run(['powershell', '-Command', download_cmd], check=True)
                    QMessageBox.information(self, '成功', 'ChatBox聊天界面下载完成！\n请手动运行Chatbox.exe启动程序')

                    # Best-effort auto-launch; the user can run it manually
                    # if this fails, so only launch errors are swallowed.
                    try:
                        subprocess.Popen(['Chatbox.exe'])
                    except OSError:
                        pass
                elif platform.system() == 'Darwin':  # macOS
                    subprocess.run(['wget', 'https://mirror.ghproxy.com/https://github.com/Bin-Huang/chatbox/releases/latest/download/Chatbox.dmg'], check=True)
                    QMessageBox.information(self, '成功', 'ChatBox已下载完成！\n请手动打开Chatbox.dmg安装程序')
            except subprocess.CalledProcessError as e:
                QMessageBox.critical(self, '错误', f'ChatBox下载失败：{str(e)}')
                return

            QMessageBox.information(self, '完成', '所有安装任务已完成！\n1. Ollama服务已安装\n2. AI模型已下载\n3. ChatBox聊天界面已下载')

        except Exception as e:
            QMessageBox.critical(self, '错误', f'安装过程出现错误：{str(e)}')

    def set_recommended_defaults(self):
        """Pre-select combo-box defaults based on installed RAM and CPU cores."""
        memory = psutil.virtual_memory()
        memory_gb = memory.total / (1024 ** 3)
        cpu_count = psutil.cpu_count()

        # Default Ollama version: latest
        self.ollama_combo.setCurrentText('Ollama 0.1.14 (推荐)')

        # Pick the largest Deepseek model the hardware can comfortably run
        if memory_gb >= 32 and cpu_count >= 8:
            self.model_combo.setCurrentText('deepseek-coder-33b-instruct (高性能)')
        elif memory_gb >= 16 and cpu_count >= 4:
            self.model_combo.setCurrentText('deepseek-coder-6.7b-instruct (推荐)')
        else:
            self.model_combo.setCurrentText('deepseek-coder-1.3b-instruct (轻量)')

    def update_progress(self, value):
        """Slot: update the download progress bar (value is 0-100)."""
        self.progress_bar.setValue(value)

    def download_finished(self):
        """Slot: handle successful completion of a ChatBox download."""
        self.progress_bar.setVisible(False)
        # BUG FIX: the original referenced self.download_button, which is
        # never created; the widget driving downloads is install_button.
        self.install_button.setEnabled(True)
        QMessageBox.information(self, "下载完成", 
                              "ChatBox已下载完成！\n请在下载文件夹中找到安装包进行安装。")

    def download_error(self, error_msg):
        """Slot: handle a failed ChatBox download."""
        self.progress_bar.setVisible(False)
        # BUG FIX: same download_button -> install_button correction as above.
        self.install_button.setEnabled(True)
        QMessageBox.critical(self, "下载错误", f"下载过程中出现错误：\n{error_msg}")

    def update_system_info(self):
        """Refresh the read-only system-information panel."""
        cpu_count = psutil.cpu_count()
        memory = psutil.virtual_memory()
        memory_gb = memory.total / (1024 ** 3)

        system_info = f"""
当前系统配置：
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
操作系统: {platform.system()} {platform.version()}
CPU核心数: {cpu_count} 核
内存大小: {memory_gb:.1f} GB
Python版本: {platform.python_version()}
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
        """
        self.system_info.setText(system_info)

    def update_suggestion(self):
        """Refresh the recommendation banner from the current hardware."""
        memory = psutil.virtual_memory()
        memory_gb = memory.total / (1024 ** 3)
        cpu_count = psutil.cpu_count()

        suggestion_text = f"""
推荐配置：
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
推荐Ollama版本：Ollama 0.1.14
原因：最新版本，功能最完善

推荐Deepseek模型：{self.get_recommended_model(memory_gb, cpu_count)}
原因：{self.get_model_reason(memory_gb, cpu_count)}
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
"""
        self.suggestion_label.setText(suggestion_text)

    def get_recommended_model(self, memory_gb, cpu_count):
        """Return the bare model name recommended for the given resources."""
        if memory_gb >= 32 and cpu_count >= 8:
            return "deepseek-coder-33b-instruct"
        elif memory_gb >= 16 and cpu_count >= 4:
            return "deepseek-coder-6.7b-instruct"
        else:
            return "deepseek-coder-1.3b-instruct"

    def get_model_reason(self, memory_gb, cpu_count):
        """Return the (Chinese) rationale string for the recommended model."""
        if memory_gb >= 32 and cpu_count >= 8:
            return "您的系统配置较高，可以运行最大模型获得最佳效果"
        elif memory_gb >= 16 and cpu_count >= 4:
            return "您的系统配置适中，建议使用中等规模模型"
        else:
            return "根据您的系统配置，建议使用轻量级模型以确保流畅运行"

    def update_model_info(self):
        """Slot: show the description for the currently selected Deepseek model."""
        model = self.model_combo.currentText()
        info = {
            'deepseek-coder-33b-instruct (高性能)': """
• 模型大小：33B参数
• 内存要求：32GB及以上
• CPU要求：建议8核以上
• 特点：效果最好，响应最准确
• 适用场景：复杂编程任务
""",
            'deepseek-coder-6.7b-instruct (推荐)': """
• 模型大小：6.7B参数
• 内存要求：16GB及以上
• CPU要求：建议4核以上
• 特点：性能与资源平衡
• 适用场景：日常开发使用
""",
            'deepseek-coder-1.3b-instruct (轻量)': """
• 模型大小：1.3B参数
• 内存要求：8GB及以上
• CPU要求：2核以上
• 特点：运行轻快
• 适用场景：简单编程任务
"""
        }
        # .get avoids a KeyError if the combo text ever diverges from the map
        self.model_info.setText(info.get(model, ''))

    def update_ollama_info(self):
        """Slot: show the description for the currently selected Ollama version."""
        version = self.ollama_combo.currentText()
        info = {
            'Ollama 0.1.14 (推荐)': """
• 最新版本
• 支持更多模型
• 性能优化
• 建议配置：4核CPU，16GB内存
""",
            'Ollama 0.1.13': """
• 稳定版本
• 广泛验证
• 兼容性好
• 建议配置：2核CPU，8GB内存
""",
            'Ollama 0.1.12': """
• 早期版本
• 基础功能
• 轻量运行
• 建议配置：2核CPU，8GB内存
"""
        }
        # .get avoids a KeyError if the combo text ever diverges from the map
        self.ollama_info.setText(info.get(version, ''))

    def check_ollama_installed(self):
        """Return True if the `ollama` CLI is installed and runnable."""
        try:
            subprocess.run(['ollama', '--version'], capture_output=True, check=True)
            return True
        except (subprocess.CalledProcessError, FileNotFoundError):
            return False

    def check_model_installed(self, model_name):
        """Return True if `model_name` appears in `ollama list` output.

        Returns False when the CLI is missing or the command fails, so the
        caller falls through to pulling the model.
        """
        try:
            result = subprocess.run(['ollama', 'list'], capture_output=True, text=True, check=True)
            return model_name in result.stdout
        except (subprocess.CalledProcessError, FileNotFoundError):
            return False

    def check_homebrew(self):
        """Return True if Homebrew is installed (macOS only)."""
        try:
            subprocess.run(['brew', '--version'], capture_output=True, check=True)
            return True
        except (subprocess.CalledProcessError, FileNotFoundError):
            return False

    def install_homebrew(self):
        """Install Homebrew via the official bootstrap script (macOS only)."""
        install_cmd = '/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"'
        subprocess.run(install_cmd, shell=True, check=True)

    def install_ollama_windows(self):
        """Point the user at the official docs for a manual Windows install."""
        QMessageBox.information(self, '提示', 'Windows版本的安装方法请参考官方文档：\nhttps://github.com/ollama/ollama')

if __name__ == '__main__':
    # Build the Qt application, show the installer window, and block in the
    # event loop until the user closes it; propagate the exit status.
    application = QApplication(sys.argv)
    installer_window = OllamaInstaller()
    installer_window.show()
    sys.exit(application.exec_())
