import gradio as gr
from huggingface_hub import InferenceClient

# Use the specified model for inference
client = InferenceClient("canstralian/rabbit-redux")

def respond(message, history, system_message, max_tokens, temperature, top_p):
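    """Stream a reply to the latest user message.

    `history` is the list of (user, assistant) message pairs that
    gr.ChatInterface maintains; the remaining arguments come from the
    additional inputs defined below.
    """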
    # Construct a single prompt from the system message, history, and current user message
    prompt = system_message + "\n"
    for user_msg, assistant_msg in history:
        prompt += f"User: {user_msg}\nAssistant: {assistant_msg}\n"
    prompt += f"User: {message}\nAssistant:"

    # Stream tokens from the Inference API and yield the growing response
    response = ""
    for token in client.text_generation(
        prompt,
        max_new_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
        stream=True,
    ):
        response += token
        yield response

# Set up the Gradio interface
demo = gr.ChatInterface(
    respond,
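    # Passed to respond() in order, after the message and history arguments: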
    additional_inputs=[
        gr.Textbox(
            value="You are a cybersecurity assistant specialized in penetration testing. You provide accurate, detailed, and actionable advice on security assessments, vulnerability analysis, and ethical hacking techniques. Your responses are concise, technical, and focused on industry best practices. You prioritize clarity and safety, ensuring all recommendations adhere to ethical guidelines and legal standards.",
            label="System message",
            placeholder="e.g., 'Provide advice focused on security and ethical guidelines.'"
        ),
        gr.Slider(minimum=1, maximum=1024, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.2, maximum=1.5, value=0.7, step=0.05, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
    ],
    css=".chatbot-box { font-family: Arial, sans-serif; padding: 10px; }"
)

if __name__ == "__main__":
    demo.launch()
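
# Local usage note (assumption, not part of the original Space config): install
# `gradio` and `huggingface_hub`, then run this script; a Hugging Face access
# token may be needed in the environment for the Inference API to serve the model.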