import spaces
import gradio as gr
import torch
import subprocess
import numpy as np

print(f"Is CUDA available: {torch.cuda.is_available()}")
print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")


# Function to start the ochat server
def start_ochat_server():
    command = [
        "python", "-m", "ochat.serving.openai_api_server", 
        "--model", "openchat/openchat_3.5"
    ]

    # Start the server in a separate process
    try:
        subprocess.Popen(command)
        return "ochat server started successfully"
    except Exception as e:
        return f"Failed to start ochat server: {e}"

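
# Note: subprocess.Popen returns immediately, while the server still needs time to
# load the model. A minimal sketch of waiting for readiness, assuming the server
# exposes an OpenAI-compatible /v1/models endpoint on localhost:18888 (port and
# path are assumptions; adjust to your deployment). Not called above; shown for
# illustration only.
def wait_for_ochat_server(url="http://localhost:18888/v1/models", timeout=300):
    import time
    import requests  # local import so the sketch stays self-contained

    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            if requests.get(url, timeout=5).status_code == 200:
                return True
        except requests.exceptions.RequestException:
            pass  # server not up yet; keep polling
        time.sleep(2)
    return False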

# Function to interact with the chat server
@spaces.GPU
def chat_with_ochat(message):
    # Placeholder: the real implementation would call the ochat server's
    # OpenAI-compatible API (see the illustrative sketch below this function).
    return "Response from ochat server"

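
# A minimal sketch of what the real interaction could look like, assuming the
# ochat server exposes the OpenAI-compatible /v1/chat/completions endpoint on
# localhost:18888 and that the `requests` package is installed (port, path, and
# payload shape are assumptions; adjust to your deployment). Not wired into the
# interface above; shown for illustration only.
def chat_with_ochat_http(message):
    import requests  # local import so the sketch stays self-contained

    payload = {
        "model": "openchat/openchat_3.5",
        "messages": [{"role": "user", "content": message}],
    }
    try:
        resp = requests.post(
            "http://localhost:18888/v1/chat/completions", json=payload, timeout=120
        )
        resp.raise_for_status()
        return resp.json()["choices"][0]["message"]["content"]
    except Exception as e:
        return f"Error contacting ochat server: {e}"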

# Start the ochat server
print(start_ochat_server())

# Create a Gradio Interface
iface = gr.Interface(
    fn=chat_with_ochat, 
    inputs="text", 
    outputs="text",
    title="ochat Chat Interface",
    description="Type your message and get a response from the ochat server."
)

iface.launch(share=True)