import os
import requests

def download(url):
    """Download the file at `url` into models/ and return its filename."""
    filename = os.path.basename(url)
    os.makedirs('models', exist_ok=True)
    # Stream the response so large model files are never held in memory at once.
    with requests.get(url, stream=True) as r:
        r.raise_for_status()
        with open(f'models/{filename}', 'wb') as f:
            for chunk in r.iter_content(chunk_size=1024 * 1024):
                if chunk:
                    f.write(chunk)
                    f.flush()

    return filename

def list_model():
    """Return the filenames of all downloaded models, creating models/ if needed."""
    os.makedirs('models', exist_ok=True)
    return os.listdir('models')

def set_model(filename):
    """Mark `filename` as the active model and regenerate run.sh to serve it."""
    if not os.path.exists(f'models/{filename}'):
        return False

    # Remember which model is currently selected.
    with open('models/set.txt', 'w', encoding='utf-8') as f:
        f.write(filename)

    # run.sh expects the server port as its first argument ($1).
    cmd = (
        f'venv/bin/python -m llama_cpp.server '
        f'--model models/{filename} --port $1 --n_gpu_layers 999'
    )
    with open('run.sh', 'w', encoding='utf-8') as f:
        f.write(cmd)

    return True
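

# Illustrative usage sketch (an assumption, not part of the original module):
# running this file directly would download a model, list what is available,
# and generate run.sh for it. The URL below is a hypothetical placeholder.
if __name__ == '__main__':
    name = download('https://example.com/path/to/model.gguf')  # hypothetical URL
    print('Available models:', list_model())
    if set_model(name):
        # run.sh takes the port as $1, so it can be started with: sh run.sh 8000
        print(f'Wrote run.sh for {name}')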