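# Streamlit playground for exercising Guardrails Genie guardrails against a
# user prompt, with guardrail and LLM calls traced to W&B Weave.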
import importlib
import streamlit as st
import weave
from dotenv import load_dotenv
from guardrails_genie.guardrails import GuardrailManager
from guardrails_genie.llm import OpenAIModel
st.title(":material/robot: Guardrails Genie Playground")
load_dotenv()
weave.init(project_name="guardrails-genie")
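
# Seed every session-state key up front so values survive Streamlit reruns.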
if "guardrails" not in st.session_state:
st.session_state.guardrails = []
if "guardrail_names" not in st.session_state:
st.session_state.guardrail_names = []
if "guardrails_manager" not in st.session_state:
st.session_state.guardrails_manager = None
if "initialize_guardrails" not in st.session_state:
st.session_state.initialize_guardrails = False
if "system_prompt" not in st.session_state:
st.session_state.system_prompt = ""
if "user_prompt" not in st.session_state:
st.session_state.user_prompt = ""
if "test_guardrails" not in st.session_state:
st.session_state.test_guardrails = False
if "llm_model" not in st.session_state:
st.session_state.llm_model = None
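
# Instantiate each selected guardrail class by name from the
# guardrails_genie.guardrails module and wrap them all in a GuardrailManager.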
def initialize_guardrails():
    st.session_state.guardrails = []
    for guardrail_name in st.session_state.guardrail_names:
        if guardrail_name == "PromptInjectionSurveyGuardrail":
            survey_guardrail_model = st.sidebar.selectbox(
                "Survey Guardrail LLM", ["", "gpt-4o-mini", "gpt-4o"]
            )
            if survey_guardrail_model:
                st.session_state.guardrails.append(
                    getattr(
                        importlib.import_module("guardrails_genie.guardrails"),
                        guardrail_name,
                    )(llm_model=OpenAIModel(model_name=survey_guardrail_model))
                )
        else:
            st.session_state.guardrails.append(
                getattr(
                    importlib.import_module("guardrails_genie.guardrails"),
                    guardrail_name,
                )()
            )
    st.session_state.guardrails_manager = GuardrailManager(
        guardrails=st.session_state.guardrails
    )
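
# Sidebar controls: a chat LLM must be selected before guardrails can be
# initialized or run.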
openai_model = st.sidebar.selectbox(
    "OpenAI LLM for Chat", ["", "gpt-4o-mini", "gpt-4o"]
)
chat_condition = openai_model != ""
guardrails = []
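
# Offer every guardrail class exported by guardrails_genie.guardrails,
# excluding the GuardrailManager itself.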
guardrail_names = st.sidebar.multiselect(
    label="Select Guardrails",
    options=[
        cls_name
        for cls_name, cls_obj in vars(
            importlib.import_module("guardrails_genie.guardrails")
        ).items()
        if isinstance(cls_obj, type) and cls_name != "GuardrailManager"
    ],
)
st.session_state.guardrail_names = guardrail_names
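
# Build the guardrails and the chat model once the user clicks the button;
# the flag persists in session state so initialization also runs on later reruns.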
if st.sidebar.button("Initialize Guardrails") and chat_condition:
    st.session_state.initialize_guardrails = True

if st.session_state.initialize_guardrails:
    with st.sidebar.status("Initializing Guardrails..."):
        initialize_guardrails()
        st.session_state.llm_model = OpenAIModel(model_name=openai_model)
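
# Prompt entry plus the trigger for running the guardrail pipeline.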
user_prompt = st.text_area("User Prompt", value="")
st.session_state.user_prompt = user_prompt
test_guardrails_button = st.button("Test Guardrails")
st.session_state.test_guardrails = test_guardrails_button
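
# Run the guardrails first; only call the chat LLM when the prompt is deemed
# safe. Invoking the weave ops via .call() returns the result together with a
# Call object whose ui_url links to the trace.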
if st.session_state.test_guardrails:
    with st.sidebar.status("Running Guardrails..."):
        guardrails_response, call = st.session_state.guardrails_manager.guard.call(
            st.session_state.guardrails_manager, prompt=st.session_state.user_prompt
        )

    if guardrails_response["safe"]:
        st.markdown(
            f"\n\n---\nPrompt is safe! Explore prompt trace on [Weave]({call.ui_url})\n\n---\n"
        )

        with st.sidebar.status("Generating response from LLM..."):
            response, call = st.session_state.llm_model.predict.call(
                st.session_state.llm_model,
                user_prompts=st.session_state.user_prompt,
            )
            st.markdown(
                response.choices[0].message.content
                + f"\n\n---\nExplore LLM generation trace on [Weave]({call.ui_url})"
            )
    else:
        st.warning("Prompt is not safe!")
        st.markdown(guardrails_response["summary"])
        st.markdown(f"Explore prompt trace on [Weave]({call.ui_url})")