K00B404's picture
Update app.py
f0b9204 verified
raw
history blame contribute delete
No virus
565 Bytes
# Load model directly
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
import streamlit as st
def count_parameters(model, name: str) -> int:
    """Return the number of trainable parameters in *model*.

    Args:
        model: A ``torch.nn.Module`` (anything exposing ``.parameters()``).
        name: Human-readable model identifier, used only for the log line.

    Returns:
        Total count of elements across all parameters with
        ``requires_grad=True`` (frozen parameters are excluded).
    """
    # Bug fix: the original string lacked the f-prefix, so "{name}" was
    # printed literally instead of being interpolated.
    print(f"Counting parameters of model:{name}")
    return sum(p.numel() for p in model.parameters() if p.requires_grad)
# Streamlit UI: ask for a Hub model id, then load it and report its
# trainable-parameter count. Nothing runs until the user enters an id.
if m_id := st.text_input(
    "model id",
    placeholder="K00B404/Merged_Beowolx-CodePro_Medusa2-14X-7B-Mistral-I-v0-2",
):
    # Download/instantiate tokenizer and model from the Hugging Face Hub.
    # NOTE(review): no error handling — an invalid id will surface as a
    # raw exception in the Streamlit app.
    tokenizer = AutoTokenizer.from_pretrained(m_id)
    model = AutoModelForCausalLM.from_pretrained(m_id)
    st.info(f"{count_parameters(model, m_id)} parameters")