File size: 1,640 Bytes
2b9e5cc
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
#!/usr/bin/env python3
import json
import logging
import os
import subprocess
import sys
from pathlib import Path

# Module-level logger named after this module; basicConfig attaches a
# stderr handler at INFO so the logger.info() calls below are visible.
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)

def read_config(model_dir: str) -> dict:
    """Read and parse the model's ``config.json``.

    Args:
        model_dir: Directory containing the Hugging Face model checkout;
            must hold a ``config.json`` file.

    Returns:
        The parsed JSON config as a dict.

    Raises:
        FileNotFoundError: If ``config.json`` does not exist.
        json.JSONDecodeError: If the file is not valid JSON.
    """
    config_path = Path(model_dir) / "config.json"
    # Explicit encoding: HF config files are UTF-8; don't depend on the
    # platform default (which may differ, e.g. on Windows).
    with open(config_path, "r", encoding="utf-8") as f:
        return json.load(f)

def convert_phi_model(model_dir: str, output_file: str) -> None:
    """Convert a Phi model checkout to GGUF (bf16) with explicit metadata.

    Runs llama.cpp's ``convert_hf_to_gguf.py`` twice: first with
    ``--vocab-only`` to build the vocabulary, then a full conversion that
    carries model/vocab metadata derived from the checkout's config.json.

    Args:
        model_dir: Directory containing the HF model and its config.json.
        output_file: Destination path for the converted GGUF file.

    Raises:
        ValueError: If config.json has no "vocab_size" entry.
        RuntimeError: If either conversion subprocess exits non-zero.
    """
    # NOTE(review): converter path is hard-coded — assumes a llama.cpp
    # checkout at /tmp/git; confirm for the target environment.
    script = Path("/tmp/git/llama.cpp") / "convert_hf_to_gguf.py"

    # Read config to get the authoritative vocab size (phi-4 config: 100352).
    config = read_config(model_dir)
    vocab_size = config.get("vocab_size")
    if vocab_size is None:
        # Previously a missing key silently interpolated the string "None"
        # into the metadata JSON, producing invalid JSON. Fail loudly instead.
        raise ValueError("config.json has no 'vocab_size' entry")

    # First pass: create vocabulary only.
    # An argv list with subprocess (shell=False) avoids the quoting and
    # shell-injection hazards of the old os.system(" ".join(cmd)) approach,
    # and sys.executable avoids relying on the script's shebang/exec bit.
    vocab_cmd = [
        sys.executable,
        str(script),
        model_dir,
        "--outfile", output_file,
        "--outtype", "bf16",
        "--vocab-only",
    ]
    logger.info("Converting vocabulary...")
    if subprocess.run(vocab_cmd).returncode != 0:
        raise RuntimeError("Vocabulary conversion failed")

    # Second pass: full model conversion with metadata. json.dumps produces
    # correctly escaped JSON instead of hand-built backslash escapes.
    metadata = json.dumps({"model": "phi-4", "vocab_size": vocab_size})
    full_cmd = [
        sys.executable,
        str(script),
        model_dir,
        "--outfile", output_file,
        "--outtype", "bf16",
        "--metadata", metadata,
    ]
    logger.info("Converting model...")
    if subprocess.run(full_cmd).returncode != 0:
        raise RuntimeError("Model conversion failed")

if __name__ == "__main__":
    model_dir = "/mnt/llm/models/phi-4/model"
    output_file = "/mnt/llm/models/phi-4.bf16.bin"

    convert_phi_model(model_dir, output_file)