MergekitCustom / app.py
K00B404's picture
Update app.py
0f1bd68 verified
raw
history blame
554 Bytes
# Load model directly
from transformers import AutoTokenizer, AutoModelForCausalLM
import streamlit as st
def count_parameters(model, name):
    """Return the number of trainable parameters in *model*.

    Args:
        model: Any object exposing ``.parameters()`` yielding tensors
            (e.g. a ``torch.nn.Module`` / transformers model).
        name: Model identifier, used only in the progress message.

    Returns:
        int: Sum of ``numel()`` over all parameters with ``requires_grad``.
    """
    # Bug fix: the original string lacked the f-prefix, so the literal
    # text "{name}" was printed instead of the model id.
    print(f"Counting parameters of model:{name}")
    return sum(p.numel() for p in model.parameters() if p.requires_grad)
# Ask the user for a Hugging Face model id, then load it and report its size.
# Bug fix: Streamlit's API is st.text_input, not st.input_text (which does
# not exist and raised AttributeError on launch).
m_id = st.text_input(
    "model id",
    placeholder="K00B404/Merged_Beowolx-CodePro_Medusa2-14X-7B-Mistral-I-v0-2",
)
# Bug fix: `with m_id:` treated the returned string as a context manager
# (AttributeError: __enter__). st.text_input returns "" until the user types
# something, so guard on truthiness instead.
if m_id:
    tokenizer = AutoTokenizer.from_pretrained(m_id)
    model = AutoModelForCausalLM.from_pretrained(m_id)
    st.info(f"{count_parameters(model, m_id)} parameters")