File size: 4,135 Bytes
44b5c36
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
#!/usr/bin/env python3
"""
Installation verification script for llama.cpp in Hugging Face Space
Run this to verify that llama.cpp is properly installed and configured
"""

import subprocess
import sys
import os

def run_command(command, description):
    """Run *command* through the shell and report the outcome.

    Prints emoji-prefixed status lines for *description* and returns
    True on a zero exit status, False on failure, timeout (30 s), or
    any unexpected error.
    """
    print(f"πŸ” {description}...")
    try:
        completed = subprocess.run(
            command,
            shell=True,
            capture_output=True,
            text=True,
            timeout=30,
        )
        ok = completed.returncode == 0
        # Successful commands show stdout; failed ones show stderr.
        print(f"βœ… {description} - SUCCESS" if ok else f"❌ {description} - FAILED")
        stream, label = (completed.stdout, "Output") if ok else (completed.stderr, "Error")
        if stream.strip():
            print(f"   {label}: {stream.strip()}")
        return ok
    except subprocess.TimeoutExpired:
        print(f"⏰ {description} - TIMEOUT")
        return False
    except Exception as e:
        print(f"❌ {description} - ERROR: {e}")
        return False

def check_python_version():
    """Check that the running interpreter is Python 3.8 or newer.

    Prints the detected version and a pass/fail line, and returns
    True when the interpreter satisfies the minimum, False otherwise.
    """
    version = sys.version_info
    print(f"🐍 Python version: {version.major}.{version.minor}.{version.micro}")

    # Compare the full version tuple. The previous check
    # (`major >= 3 and minor >= 8`) wrongly rejected any future major
    # release with a small minor number, e.g. Python 4.0.
    if version >= (3, 8):
        print("βœ… Python version is compatible")
        return True
    print("❌ Python version should be 3.8 or higher")
    return False

def check_system_packages():
    """Verify the C/C++ build toolchain (gcc, g++, cmake, make) is on PATH.

    Returns True only when every tool is found.
    """
    required = ("gcc", "g++", "cmake", "make")
    # `which` exits non-zero for a missing tool; run_command maps that
    # to False, so all() reflects full toolchain availability.
    statuses = [run_command(f"which {tool}", f"Checking {tool}") for tool in required]
    return all(statuses)

def install_and_test_llamacpp():
    """Install llama-cpp-python via pip, then verify it imports cleanly.

    Returns True only when both the pip install and a subsequent import
    in a fresh interpreter succeed.
    """
    print("\nπŸ“¦ Installing llama-cpp-python...")

    # Guard clause: bail out early if the pip install itself fails.
    if not run_command(
        f"{sys.executable} -m pip install llama-cpp-python --verbose",
        "Installing llama-cpp-python",
    ):
        print("❌ Failed to install llama-cpp-python")
        return False

    # An install can still fail at import time (e.g. a broken native
    # build), so exercise the import in a subprocess.
    return run_command(
        f"{sys.executable} -c 'from llama_cpp import Llama; print(\"Import successful\")'",
        "Testing llama-cpp-python import",
    )

def main():
    """Run every verification check and print a summary.

    Returns True only when all checks passed.
    """
    print("πŸš€ llama.cpp Installation Verification for Hugging Face Space")
    print("=" * 70)

    checks = [
        ("Python Version", check_python_version),
        ("System Packages", check_system_packages),
        ("llama-cpp-python Installation", install_and_test_llamacpp),
    ]

    results = []
    for label, check in checks:
        print(f"\nπŸ§ͺ Running: {label}")
        print("-" * 40)
        results.append(check())
        print()

    print("=" * 70)
    print("πŸ“Š VERIFICATION SUMMARY:")

    # Pair each check with its recorded outcome for the summary lines.
    for (label, _), passed in zip(checks, results):
        status = "βœ… PASSED" if passed else "❌ FAILED"
        print(f"   {label}: {status}")

    everything_ok = all(results)
    if everything_ok:
        print("\nπŸŽ‰ ALL CHECKS PASSED!")
        print("βœ… llama.cpp is successfully installed and ready to use.")
        print("\nπŸ“ Next steps:")
        print("   1. Run 'python test_llamacpp.py' to test the integration")
        print("   2. Start your Gradio app with 'python app.py'")
        print("   3. Upload a GGUF model file to enable full functionality")
    else:
        print("\n⚠️  SOME CHECKS FAILED!")
        print("❌ Please review the errors above and fix them before proceeding.")
        print("\nπŸ”§ Common solutions:")
        print("   - Ensure build tools are installed (build-essential, cmake)")
        print("   - Check that you have sufficient memory and disk space")
        print("   - Try reinstalling with: pip install --force-reinstall llama-cpp-python")

    return everything_ok

if __name__ == "__main__":
    # Mirror main()'s boolean result as a conventional process exit code.
    sys.exit(0 if main() else 1)