# Load model directly
from transformers import AutoTokenizer, AutoModelForCausalLM
import streamlit as st

def count_parameters(model, name: str) -> int:
    """Return the number of trainable parameters in *model*.

    Args:
        model: Any object exposing ``.parameters()`` yielding tensors
            with ``numel()`` and ``requires_grad`` (e.g. a torch.nn.Module).
        name: Identifier used only for the progress log line.

    Returns:
        Total element count of all parameters with ``requires_grad=True``.
    """
    # Bug fix: original used a plain string, printing the literal "{name}".
    print(f"Counting parameters of model:{name}")
    return sum(p.numel() for p in model.parameters() if p.requires_grad)
   
# Prompt for a Hugging Face model id.
# Bug fix: the Streamlit API is `st.text_input`, not `st.input_text`.
m_id = st.text_input("model id", placeholder="K00B404/Merged_Beowolx-CodePro_Medusa2-14X-7B-Mistral-I-v0-2")

# Bug fix: `with m_id:` raised AttributeError (a str is not a context
# manager). Guard on a non-empty input instead, so nothing loads until
# the user has typed a model id.
if m_id:
    tokenizer = AutoTokenizer.from_pretrained(m_id)
    model = AutoModelForCausalLM.from_pretrained(m_id)

    st.info(f"{count_parameters(model, m_id)} parameters")