import os

import google.generativeai as genai
import streamlit as st

# Configure the Gemini client. The API key is read from an environment
# variable rather than being hardcoded in the source file.
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
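# To run the app locally (the file name below is only an example):
#     export GOOGLE_API_KEY="your-key-here"
#     streamlit run app.py
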
def load_prompt():
    """Load the assistant's guidelines from guidelines.txt; they prefix every query."""
    try:
        with open("guidelines.txt", "r", encoding="utf-8") as file:
            return file.read().strip()
    except Exception as e:
        return f"Error loading prompt: {e}"


prompt = load_prompt()

def ask_gemini(query):
    """Send the guidelines plus the user's query to Gemini and return the reply text."""
    try:
        full_query = prompt + "\n\nUser: " + query + "\nAssistant:"
        model = genai.GenerativeModel("gemini-1.5-flash-latest")
        response = model.generate_content(full_query)
        return response.text.strip()
    except Exception as e:
        return f"Error: {e}"

st.title("💬 Kiki -- Your AI-based Humanoid Assistant")
st.write("Type your message below and get a response.")

user_input = st.text_input("You:", "")

if user_input:
    response = ask_gemini(user_input)
    st.write("🤖 Kiki:", response)

st.write("🔹 Developed with Streamlit & Gemini AI.")