|
import streamlit as st |
|
import ollama |
|
|
|
def get_llama_response(user_input):
    """Send *user_input* to the local llama2 model and return its reply text.

    On any failure the exception is surfaced in the Streamlit UI via
    ``st.error`` and a fallback apology string is returned instead of
    raising, so the app keeps running.
    """
    messages = [{'role': 'user', 'content': user_input}]
    try:
        reply = ollama.chat(model='llama2', messages=messages)
    except Exception as exc:
        # Boundary handler: report to the UI and degrade gracefully.
        st.error(f"An error occurred: {str(exc)}")
        return "Sorry, I couldn't process your request at the moment. Please try again later."
    return reply['message']['content']
|
|
|
def main():
    """Render the chat UI: a title, a text area, an Ask button, and the reply.

    Fix: guard against empty or whitespace-only input so a blank click on
    "Ask" no longer triggers a pointless model call.
    """
    st.title("Llama Chatbot")

    user_input = st.text_area("You:", placeholder="Enter your message here...")

    if st.button("Ask"):
        # Don't call the model with nothing to say.
        if not user_input.strip():
            st.warning("Please enter a message first.")
            return
        with st.spinner("Thinking..."):
            response = get_llama_response(user_input)
            st.success("Llama says:")
            st.write(response)
|
|
|
# Script entry point (run via `streamlit run <this file>`).
if __name__ == "__main__":

    main()