nishanthsr committed on
Commit
35690ca
1 Parent(s): febe377

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +36 -0
app.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # streamlit_app.py
2
+
3
+ import streamlit as st
4
+ from transformers import AutoTokenizer, AutoModelForCausalLM
5
+ import torch
6
+
7
# Build the tokenizer/model pair exactly once per server process.
@st.cache_resource
def load_model():
    """Download (or reuse the local cache of) the MetaMath-Mistral-7B
    checkpoint and return its ``(tokenizer, model)`` pair.

    Decorated with ``st.cache_resource`` so Streamlit reruns of the script
    share one in-memory copy instead of reloading the 7B weights.
    """
    repo_id = "meta-math/MetaMath-Mistral-7B"
    tok = AutoTokenizer.from_pretrained(repo_id)
    lm = AutoModelForCausalLM.from_pretrained(repo_id)
    return tok, lm
13
+
14
tokenizer, model = load_model()
model.eval()  # inference only — disables dropout etc.

# Streamlit app layout
st.title("MetaMath Mistral 7B Question-Answering")
st.write("Ask any question, and the model will generate an answer:")

# Input from user
question = st.text_input("Enter your question:")

if st.button("Generate Answer"):
    if question.strip():
        # Tokenize input. Calling the tokenizer (rather than .encode) also
        # returns the attention mask, which generate() needs to distinguish
        # real tokens from padding — omitting it triggers unreliable-output
        # warnings from transformers.
        inputs = tokenizer(question, return_tensors="pt")

        # Generate response. max_new_tokens bounds only the generated
        # continuation; the original max_length=200 counted the prompt
        # tokens too, so long questions left almost no room for an answer.
        with torch.no_grad():
            outputs = model.generate(
                **inputs,
                max_new_tokens=200,
                num_return_sequences=1,
            )

        # Decode and display the output
        answer = tokenizer.decode(outputs[0], skip_special_tokens=True)
        st.write("**Answer:**", answer)
    else:
        st.write("Please enter a question to get an answer.")