Transformers
English
code
Inference Endpoints
File size: 2,371 Bytes
de09b1c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
import gradio as gr
from src.utils.dataset_loader import load_dataset
from src.models.model import load_model, run_inference

# Load the debugging-prompt dataset (the loader may return None on failure).
df = load_dataset()

# Example model identifiers offered in the UI dropdown.
models = [
    "Canstralian/text2shellcommands",
    "Canstralian/RabbitRedux",
    "Canstralian/CySec_Known_Exploit_Analyzer",
]

# Prompt IDs mirror the dataset index; empty when no dataset was loaded.
prompt_ids = [] if df is None else list(df.index)

# Function to simulate conversation with model selection
def chat_interface(user_input, selected_model, prompt_id=None):
    """Return a model response for *user_input*.

    Parameters
    ----------
    user_input : str
        The message typed by the user.
    selected_model : str
        One of the model identifiers from the ``models`` dropdown.
    prompt_id : optional
        A label from ``df.index`` selecting a debugging prompt. When the
        dataset is unavailable or no prompt is given, a simulated response
        is returned instead of running inference.

    Returns
    -------
    str
        The inference result, or a canned simulated reply.
    """
    if df is not None and prompt_id is not None:
        # prompt_id values come from df.index (labels), so look the row up
        # by label (.loc), not by position (.iloc) — the two differ for any
        # non-RangeIndex dataset. This was the bug in the original code.
        prompt = df.loc[prompt_id]["prompt_text"]  # NOTE(review): confirm column name
        # Run inference on the selected model
        response = run_inference(user_input, selected_model, prompt)
    else:
        response = f"[{selected_model}] says: You entered '{user_input}'. This is a simulated response."
    return response

# Gradio Interface: a single-page "retro terminal" chat layout.
# Row 1: title · Row 2: inputs (message, model, optional prompt ID)
# Row 3: response box · Row 4: send button.
with gr.Blocks(css="./static/styles.css") as demo:
    with gr.Row():
        gr.Markdown("### Retro Hacker Chat with Debugging Prompts", elem_classes="retro-terminal")
    with gr.Row():
        # Free-text user message.
        user_input = gr.Textbox(
            label="Enter your message:",
            placeholder="Type your message here...",
            elem_classes="retro-terminal"
        )
        # Model picker, defaulting to the first entry of `models`.
        model_selector = gr.Dropdown(
            choices=models,
            label="Select Model",
            value=models[0],
            elem_classes="retro-terminal"
        )
        # The prompt picker only exists when the dataset loaded successfully
        # (prompt_ids is empty otherwise); downstream wiring checks for None.
        if prompt_ids:
            prompt_selector = gr.Dropdown(
                choices=prompt_ids,
                label="Select Debugging Prompt ID",
                value=prompt_ids[0],
                elem_classes="retro-terminal"
            )
        else:
            prompt_selector = None
    with gr.Row():
        # Read-only display area for the model's reply.
        response_box = gr.Textbox(
            label="Model Response:",
            placeholder="The model's response will appear here...",
            elem_classes="retro-terminal"
        )
    with gr.Row():
        send_button = gr.Button("Send", elem_classes="retro-terminal")

    # Link input and output. Use an explicit None check instead of relying on
    # the truthiness of a Gradio component object (`if prompt_selector:`),
    # which is fragile and not the stated contract of the code above.
    if prompt_selector is not None:
        send_button.click(chat_interface, inputs=[user_input, model_selector, prompt_selector], outputs=response_box)
    else:
        send_button.click(chat_interface, inputs=[user_input, model_selector], outputs=response_box)

# Launch the interface
demo.launch()