import spaces
import gradio as gr
import torch
import subprocess
import numpy as np

print(f"Is CUDA available: {torch.cuda.is_available()}")
if torch.cuda.is_available():
    print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")

# Function to start the ochat server
def start_ochat_server():
    command = [
        "python", "-m", "ochat.serving.openai_api_server",
        "--model", "openchat/openchat_3.5"
    ]
    # Start the server in a separate process
    try:
        subprocess.Popen(command)
        return "ochat server started successfully"
    except Exception as e:
        return f"Failed to start ochat server: {e}"
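
# --- Hedged sketch (not part of the original file) -------------------------
# subprocess.Popen returns immediately, so the model may still be loading when
# the first chat request arrives. One option is to poll the server until it
# answers. The port (18888) and the /v1/models route are assumptions based on
# ochat's usual OpenAI-compatible defaults, not something this Space defines.
def wait_for_ochat_server(timeout_s: int = 300) -> bool:
    import time
    import requests  # assumed available; add to requirements.txt if used

    deadline = time.time() + timeout_s
    while time.time() < deadline:
        try:
            if requests.get("http://localhost:18888/v1/models", timeout=5).ok:
                return True
        except requests.RequestException:
            pass  # server not up yet; keep polling
        time.sleep(2)
    return False
# ----------------------------------------------------------------------------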

# Function to interact with the chat server
@spaces.GPU
def chat_with_ochat(message):
    # Here you would add the code to interact with the ochat server
    # (a hedged sketch of one possible call follows below);
    # for now this is just a placeholder response
    return "Response from ochat server"
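
# --- Hedged sketch (not part of the original file) -------------------------
# The ochat server started above exposes an OpenAI-compatible API, so a real
# chat_with_ochat could forward the message to it along these lines. The base
# URL, port 18888, and the /v1/chat/completions route are assumptions based on
# ochat's usual defaults; query_ochat is a hypothetical helper, not wired into
# the interface below.
def query_ochat(message: str) -> str:
    import requests  # assumed available; add to requirements.txt if used

    payload = {
        "model": "openchat/openchat_3.5",
        "messages": [{"role": "user", "content": message}],
    }
    resp = requests.post(
        "http://localhost:18888/v1/chat/completions",  # assumed default port
        json=payload,
        timeout=60,
    )
    resp.raise_for_status()
    return resp.json()["choices"][0]["message"]["content"]
# ----------------------------------------------------------------------------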

# Start the ochat server
start_ochat_server()

# Create a Gradio Interface
iface = gr.Interface(
    fn=chat_with_ochat,
    inputs="text",
    outputs="text",
    title="ochat Chat Interface",
    description="Type your message and get a response from the ochat server."
)

iface.launch(share=True)