# quote-generator-de / st_app.py
import random
import streamlit as st
from ctransformers import AutoModelForCausalLM
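# ctransformers provides Python bindings for GGML/GGUF-quantized transformer
# models, so the quantized quote model below can run on CPU.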

# System prompt (German): a conversation between a helpful AI assistant and a
# user, in which the assistant answers in the form of quotes.
system_prompt = """Dies ist eine Unterhaltung zwischen \
einem intelligenten, hilfsbereiten \
KI-Assistenten und einem Nutzer.
Der Assistent gibt Antworten in Form von Zitaten."""

# ChatML-style prompt template used to query the model; {prompt} receives the
# author name, so the user turn reads "Zitiere <author>" ("Quote <author>").
prompt_format = "<|im_start|>system\n{system_prompt}\
<|im_end|>\n<|im_start|>user\nZitiere {prompt}\
<|im_end|>\n<|im_start|>assistant\n"
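# For illustration: with the author "Goethe", the rendered query sent to the
# model looks like this (system prompt wrapped here for readability):
#   <|im_start|>system
#   Dies ist eine Unterhaltung zwischen einem intelligenten, hilfsbereiten
#   KI-Assistenten und einem Nutzer.
#   Der Assistent gibt Antworten in Form von Zitaten.<|im_end|>
#   <|im_start|>user
#   Zitiere Goethe<|im_end|>
#   <|im_start|>assistant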

modes = {
    "Authentisch": {"temperature": 0.05, "top_k": 10},
    "Ausgeglichen": {"temperature": 0.5, "top_p": 0.9},
    "Chaotisch": {"temperature": 0.9},
}
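# The presets above trade determinism for variety: "Authentisch" samples
# near-greedily (low temperature, small top_k), "Ausgeglichen" uses moderate
# temperature with nucleus sampling (top_p=0.9), and "Chaotisch" samples far
# more freely at temperature 0.9.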

authors = [
    "Johann Wolfgang von Goethe",
    "Friedrich Schiller",
    "Immanuel Kant",
    "Oscar Wilde",
    "Lü Bu We",
    "Wilhelm Busch",
    "Friedrich Nietzsche",
    "Karl Marx",
    "William Shakespeare",
    "Kurt Tucholsky",
    "Georg Christoph Lichtenberg",
    "Arthur Schopenhauer",
    "Seneca der Jüngere",
    "Martin Luther",
    "Mark Twain",
    "Cicero",
    "Marie von Ebner-Eschenbach",
    "Novalis",
    "Franz Kafka",
    "Jean-Jacques Rousseau",
    "Heinrich Heine",
    "Honoré de Balzac",
    "Georg Büchner",
    "Gotthold Ephraim Lessing",
    "Markus M. Ronner",
    "Gerhard Uhlenbruck",
    "Theodor Fontane",
    "Jean Paul",
    "Leo Tolstoi",
    "Friedrich Hebbel",
    "Horaz",
    "Albert Einstein",
    "Jesus von Nazareth",
    "Angela Merkel",
    "Ambrose Bierce",
    "Christian Morgenstern",
    "Friedrich Hölderlin",
    "Joseph Joubert",
    "François de La Rochefoucauld",
    "Otto von Bismarck",
    "Fjodor Dostojewski",
    "Ovid",
    "Rudolf Steiner",
    "Ludwig Börne",
    "Hugo von Hofmannsthal",
    "Laotse",
    "Thomas von Aquin",
    "Ludwig Wittgenstein",
    "Friedrich Engels",
    "Charles de Montesquieu",
]

st.title("Zitatgenerator")  # "Quote generator"

# Initialization: load the GGUF model once per session and keep it in the
# session state so reruns of the script do not reload it.
if "model" not in st.session_state:
    data_load_state = st.text("Lade Modell...")  # "Loading model..."
    model = AutoModelForCausalLM.from_pretrained(
        "caretech-owl/leo-hessionai-7B-quotes-gguf", model_type="Llama"
    )
    st.session_state["model"] = model
    st.session_state["author"] = ""
    data_load_state.text("Modell geladen!")  # "Model loaded!"
else:
    data_load_state = st.text("Modell geladen!")
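# Note: st.session_state caches the model per browser session; if memory use
# becomes an issue, st.cache_resource could share one instance across sessions.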

# Input form: author text field (left), mode selector (right), submit button.
with st.form("user_form", clear_on_submit=False):
    col1, col2 = st.columns([2, 1])
    with col1:
        # st.empty() placeholder so the field can be re-rendered with a random
        # author if the user submits without entering a name.
        placeholder = st.empty()
        question = placeholder.text_input(
            "Zitat generieren von: ",  # "Generate a quote by:"
            placeholder="Zufallsautor",  # "Random author"
            value=st.session_state["author"],
        )
        generate = st.form_submit_button(label="Zitat generieren")
    with col2:
        mode = st.selectbox(
            "Modus:",
            ("Authentisch", "Ausgeglichen", "Chaotisch"),
            index=1,
        )

if generate:
    # No author entered: pick a random one and show it in the input field.
    if not question:
        question = placeholder.text_input(
            "Zitat generieren von: ",
            placeholder="Aristoteles",
            value=random.choice(authors),
        )
    st.session_state["author"] = question
    # "Thinking about a quote (this can take a while)..."
    with st.spinner("Denke über Zitat nach (das kann etwas dauern)..."):
        # Build the full ChatML prompt and log it to the server console.
        query = prompt_format.format(
            system_prompt=system_prompt,
            prompt=st.session_state["author"],
        )
        print("=" * 20)
        print(query)
        # Generate with the selected sampling preset; stop at the end of the
        # assistant turn.
        output = st.session_state["model"](
            query, stop="<|im_end|>", max_new_tokens=200, **modes[mode]
        )
        print("-" * 20)
        print(output)
        print("=" * 20)
        st.success(output)
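
# To run the app locally (assuming streamlit and ctransformers are installed):
#   streamlit run st_app.py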