assistAI / app.py
import streamlit as st
from ctransformers import AutoModelForCausalLM
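# To run this app locally (these commands are not part of the original file):
#     pip install streamlit ctransformers
#     streamlit run app.py
# Note: gpu_layers=50 below assumes a GPU-enabled ctransformers build; use 0 for CPU-only.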
# Cache the model so it is loaded only once, not on every Streamlit rerun.
@st.cache_resource
def load_model():
    return AutoModelForCausalLM.from_pretrained(
        "TheBloke/Mistral-7B-v0.1-GGUF",
        model_file="mistral-7b-v0.1.Q4_K_M.gguf",
        model_type="mistral",
        gpu_layers=50)

llm = load_model()
def generate_response(prompt):
    return llm(prompt)

def main():
    st.title("AI Text Generation App")
    prompt = st.text_input("Enter your prompt:")
    if prompt:
        try:
            response = generate_response(prompt)
            # Display the generated response
            st.subheader("Generated Response")
            st.write(response)
        except Exception as e:
            st.error(f"Error generating response: {e}")
if __name__ == "__main__":
    main()