Delete App_Function_Libraries/Local_LLM/Local_LLM_ollama.py
Browse files
App_Function_Libraries/Local_LLM/Local_LLM_ollama.py
DELETED
@@ -1,96 +0,0 @@
|
|
1 |
-
import platform
|
2 |
-
|
3 |
-
import gradio as gr
|
4 |
-
import subprocess
|
5 |
-
import psutil
|
6 |
-
import os
|
7 |
-
import signal
|
8 |
-
|
9 |
-
|
10 |
-
def get_ollama_models():
    """Return the names of locally available Ollama models.

    Runs ``ollama list`` and takes the first whitespace-delimited column
    of every row after the header.

    Returns:
        list[str]: Model names, or an empty list when the CLI exits
        non-zero or the ``ollama`` binary is not installed.
    """
    try:
        result = subprocess.run(['ollama', 'list'], capture_output=True, text=True, check=True)
        models = result.stdout.strip().split('\n')[1:]  # Skip header row
        return [model.split()[0] for model in models]
    except (subprocess.CalledProcessError, FileNotFoundError):
        # FileNotFoundError: `ollama` is not on PATH — previously this
        # propagated and crashed the Gradio tab at build time.
        return []
|
17 |
-
|
18 |
-
|
19 |
-
def pull_ollama_model(model_name):
    """Download *model_name* via ``ollama pull``.

    Args:
        model_name: Name/tag of the model to fetch (e.g. ``llama3:8b``).

    Returns:
        str: A human-readable success or failure message (never raises).
    """
    try:
        subprocess.run(['ollama', 'pull', model_name], check=True)
        return f"Successfully pulled model: {model_name}"
    except (subprocess.CalledProcessError, FileNotFoundError) as e:
        # FileNotFoundError covers a missing `ollama` binary, which the
        # original let propagate to the caller.
        return f"Failed to pull model: {e}"
|
25 |
-
|
26 |
-
|
27 |
-
def serve_ollama_model(model_name, port):
    """Start an ``ollama serve`` process listening on *port*.

    Note: ``ollama serve`` hosts all local models; *model_name* is only
    echoed back in the status message.

    Args:
        model_name: Model name shown in the status message.
        port: TCP port the server should bind (int or numeric string).

    Returns:
        str: A human-readable status message (never raises).
    """
    import socket

    try:
        port = int(port)
        # Probe the port by binding to it; failure (EADDRINUSE) means it
        # is taken. This replaces the psutil.net_connections() scan,
        # which requires elevated privileges on some platforms.
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as probe:
            try:
                probe.bind(("127.0.0.1", port))
            except OSError:
                return f"Port {port} is already in use. Please choose a different port."

        # OLLAMA_HOST expects "host:port" — the original exported the
        # bare port number, which Ollama does not parse as a port.
        os.environ["OLLAMA_HOST"] = f"127.0.0.1:{port}"
        # argv list with shell=False: no shell features are needed here.
        process = subprocess.Popen(["ollama", "serve"])
        return f"Started Ollama server for model {model_name} on port {port}. Process ID: {process.pid}"
    except Exception as e:
        return f"Error starting Ollama server: {e}"
|
42 |
-
|
43 |
-
|
44 |
-
def stop_ollama_server(pid):
    """Terminate the Ollama server process with the given *pid*.

    Args:
        pid: Process ID of the server (int, or float from ``gr.Number``).

    Returns:
        str: A human-readable status message (never raises).
    """
    try:
        pid = int(pid)  # gr.Number may deliver a float
        system = platform.system()
        if system == "Windows":
            os.system(f"taskkill /F /PID {pid}")
            return f"Stopped Ollama server with PID {pid}"
        elif system == "Linux":
            # os.kill raises ProcessLookupError for an unknown PID; the
            # original os.system(f"kill {pid}") silently ignored failures,
            # so the ProcessLookupError handler below was unreachable.
            os.kill(pid, signal.SIGTERM)
            return f"Stopped Ollama server with PID {pid}"
        elif system == "Darwin":
            os.system("""osascript -e 'tell app "Ollama" to quit'""")
            return f"(Hopefully) Stopped Ollama server using osascript..."
        else:
            # The original fell through here and returned None.
            return f"Unsupported platform: {system}"
    except ProcessLookupError:
        return f"No process found with PID {pid}"
    except Exception as e:
        return f"Error stopping Ollama server: {e}"
|
59 |
-
|
60 |
-
|
61 |
-
def create_ollama_tab():
    """Build the "Ollama Model Serving" Gradio tab.

    Must be called inside a ``gr.Blocks()`` context. Wires up widgets for
    listing/pulling models and starting/stopping an ``ollama serve``
    process via the module-level helper functions.
    """
    with gr.Tab("Ollama Model Serving"):
        gr.Markdown("# Ollama Model Serving")

        # Model discovery: dropdown seeded from `ollama list` at build time.
        with gr.Row():
            model_list = gr.Dropdown(label="Available Models", choices=get_ollama_models())
            refresh_button = gr.Button("Refresh Model List")

        # Pull a new model by name.
        with gr.Row():
            new_model_name = gr.Textbox(label="Model to Pull")
            pull_button = gr.Button("Pull Model")

        pull_output = gr.Textbox(label="Pull Status")

        # Server start controls.
        with gr.Row():
            # FIXME - Update to update config.txt file
            serve_model = gr.Dropdown(label="Model to Serve", choices=get_ollama_models())
            port = gr.Number(label="Port", value=11434, precision=0)
            serve_button = gr.Button("Start Server")

        serve_output = gr.Textbox(label="Server Status")

        # Server stop controls: the PID is reported in the serve status text.
        with gr.Row():
            pid = gr.Number(label="Server Process ID", precision=0)
            stop_button = gr.Button("Stop Server")

        stop_output = gr.Textbox(label="Stop Status")

        def update_model_lists():
            # Re-query `ollama list` and refresh both model dropdowns.
            models = get_ollama_models()
            return gr.update(choices=models), gr.update(choices=models)

        # Event wiring.
        refresh_button.click(update_model_lists, outputs=[model_list, serve_model])
        pull_button.click(pull_ollama_model, inputs=[new_model_name], outputs=[pull_output])
        serve_button.click(serve_ollama_model, inputs=[serve_model, port], outputs=[serve_output])
        stop_button.click(stop_ollama_server, inputs=[pid], outputs=[stop_output])
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|