import transformers
import torch
import gradio as gr
from datasets import load_dataset

model_id = "meta-llama/Meta-Llama-3.1-8B-Instruct"
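# Build the text-generation pipeline once at start-up; bfloat16 weights keep memory
# use down and device_map="auto" places the model on the available GPU(s).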
pipeline = transformers.pipeline(
    "text-generation",
    model=model_id,
    model_kwargs={"torch_dtype": torch.bfloat16},
    device_map="auto",
)
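# Cisco CLI command reference used for direct lookups before falling back to the model.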
dataset = load_dataset("quantumminds/cisco_cli_commands")


def search_dataset(user_input):
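    # Scan the train split for a command mentioned anywhere in the user's question.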
    for entry in dataset["train"]:
        if entry["command"].lower() in user_input.lower():
            example = (
                entry["examples"][0]["example_command"]
                if "examples" in entry
                else "No example available"
            )
            return (
                f"**Command:** {entry['command']}\n\n"
                f"**Description:** {entry['description']}\n\n"
                f"**Example:** {example}"
            )
    return None


def generate_response(user_input, chat_history):
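    # Try a direct dataset lookup first; only query the model if nothing matches.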
    dataset_response = search_dataset(user_input)

    if dataset_response:
        chat_history.append({"role": "user", "content": user_input})
        chat_history.append({"role": "assistant", "content": dataset_response})
        return chat_history
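    # No dataset match: ask the model. Passing a chat-style message list applies the
    # instruct model's chat template, and the generated reply comes back as the last
    # message in "generated_text".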
    outputs = pipeline([{"role": "user", "content": user_input}], max_new_tokens=512)
    assistant_response = outputs[0]["generated_text"][-1]["content"]

    chat_history.append({"role": "user", "content": user_input})
    chat_history.append({"role": "assistant", "content": assistant_response})
    return chat_history
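# Gradio UI: a chat window, a question box, and Submit / Clear buttons.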
with gr.Blocks(theme=gr.themes.Ocean()) as iface:
    gr.Markdown("<h1 style='text-align: center;'>Cisco Configuration Assistant</h1>")
    chatbot = gr.Chatbot(label="Cisco Configuration Chatbot", type="messages", height=500)
    user_input = gr.Textbox(placeholder="Enter your Cisco switch/router question here...", label="Your Input")
    with gr.Row():
        submit_btn = gr.Button("Submit")
        clear_btn = gr.Button("Clear Feed")
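    # Callback shared by the textbox and the Submit button: append the new
    # exchange to the chat history and clear the input field.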
    def user(query, history):
        history = generate_response(query, history)
        return history, ""

    user_input.submit(user, [user_input, chatbot], [chatbot, user_input])
    submit_btn.click(user, [user_input, chatbot], [chatbot, user_input])
    clear_btn.click(lambda: [], None, chatbot, queue=False)
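# Print a summary of the loaded dataset (splits, features, and row counts) at start-up.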
print(dataset)

iface.launch()