Falln87 committed on
Commit
92348ee
1 Parent(s): d279147

Update app.py

Files changed (1)
  1. app.py +22 -3
app.py CHANGED
@@ -141,8 +141,27 @@ if model_type == "Stable Diffusion":
         cols[i].image(image, caption=f"Image {i+1}", use_column_width=True)
 
 elif model_type == "Llama":
-    # Llama model implementation goes here
+    model = st.selectbox("Select a Llama model:", model_dict[model_type]["Models"])
+    tokenizer = st.selectbox("Select a tokenizer:", model_dict[model_type]["Tokenizers"])
+    feature = st.selectbox("Select a feature:", model_dict[model_type]["Features"])
+    weight = st.selectbox("Select a weight:", model_dict[model_type]["Weights"])
+
+    if st.button("Generate Text"):
+        st.write("Generating text...")
+
+        tokenizer = AutoTokenizer.from_pretrained(tokenizer)
+        model = AutoModelForCausalLM.from_pretrained(model)
+
+        input_text = st.text_area("Enter a prompt:")
+
+        # Tokenize the input text
+        inputs = tokenizer(input_text, return_tensors="pt")
+
+        # Generate the text
+        output = model.generate(**inputs)
 
-    # ...
+        # Decode the generated text
+        generated_text = tokenizer.decode(output[0], skip_special_tokens=True)
 
-    # ...
+        st.write("Generated Text:")
+        st.write(generated_text)
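
One thing to note about the added branch: st.text_area("Enter a prompt:") is created inside the if st.button("Generate Text"): block, after the model is loaded, so on the rerun triggered by the click the prompt box has only just appeared and its value is still empty. Below is a minimal, self-contained sketch of the same Llama branch with the prompt box rendered before the button. It is a sketch under assumptions, not the committed code: the model_dict contents, the meta-llama/Llama-2-7b-hf repo id, and the hard-coded model_type are placeholders standing in for objects defined earlier in app.py.

import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer

# Hypothetical stand-ins for objects defined earlier in app.py; the real
# model_dict and model_type come from the existing UI code above this hunk.
model_dict = {
    "Llama": {
        "Models": ["meta-llama/Llama-2-7b-hf"],       # placeholder entries
        "Tokenizers": ["meta-llama/Llama-2-7b-hf"],
        "Features": ["Text generation"],
        "Weights": ["fp16"],
    }
}
model_type = "Llama"

if model_type == "Llama":
    model_name = st.selectbox("Select a Llama model:", model_dict[model_type]["Models"])
    tokenizer_name = st.selectbox("Select a tokenizer:", model_dict[model_type]["Tokenizers"])
    feature = st.selectbox("Select a feature:", model_dict[model_type]["Features"])
    weight = st.selectbox("Select a weight:", model_dict[model_type]["Weights"])

    # Render the prompt box before the button so its contents are available
    # on the rerun that the button click triggers.
    input_text = st.text_area("Enter a prompt:")

    if st.button("Generate Text"):
        st.write("Generating text...")

        # Load the selected tokenizer and model (feature/weight are collected
        # by the UI but, as in the commit, not yet wired into generation).
        tokenizer = AutoTokenizer.from_pretrained(tokenizer_name)
        model = AutoModelForCausalLM.from_pretrained(model_name)

        # Tokenize the prompt and generate a continuation
        inputs = tokenizer(input_text, return_tensors="pt")
        output = model.generate(**inputs)

        # Decode the generated tokens back into text
        generated_text = tokenizer.decode(output[0], skip_special_tokens=True)

        st.write("Generated Text:")
        st.write(generated_text)

As written, the from_pretrained calls rerun on every click, which reloads the weights each time; in practice they would usually be wrapped in a function decorated with st.cache_resource so the selected model loads once and is reused across reruns.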