Vokturz committed on
Commit 74c26d6
1 Parent(s): 204281c
Files changed (1)
  1. src/app.py +2 -2
src/app.py CHANGED
@@ -87,7 +87,7 @@ lora_pct = st.sidebar.slider("LoRa % trainable parameters", 0.1, 100.0, 2.0, ste
 st.sidebar.dataframe(gpu_spec.T)
 
 memory_table = pd.DataFrame(st.session_state[model_name][1]).set_index('dtype')
-memory_table['LoRA Fine-Tunning (GB)'] = (memory_table["Total Size (GB)"] +
+memory_table['LoRA Fine-Tuning (GB)'] = (memory_table["Total Size (GB)"] +
 (memory_table["Parameters (Billion)"]* lora_pct/100 * (16/8)*4)) * 1.2
 
 _, col, _ = st.columns([1,3,1])
@@ -99,7 +99,7 @@ with col.expander("Information", expanded=True):
 where is estimated as """)
 
 st.latex(r"""\text{Memory}_\text{Inference} \approx \text{Model Size} \times 1.2""")
-st.markdown("""- For LoRa Fine-tunning, I'm asuming a **16-bit** dtype of trainable parameters. The formula (in terms of GB) is""")
+st.markdown("""- For LoRa Fine-tuning, I'm asuming a **16-bit** dtype of trainable parameters. The formula (in terms of GB) is""")
 st.latex(r"\text{Memory}_\text{LoRa} \approx \text{Model Size} + \left(\text{ \# trainable Params}_\text{Billions}\times\frac{16}{8} \times 4\right) \times 1.2")
 st.markdown("- You can understand `int4` as models in `GPTQ-4bit`, `AWQ-4bit` or `Q4_0 GGUF/GGML` formats")
 
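For reference, the renamed `LoRA Fine-Tuning (GB)` column follows the memory formula shown in the diff: trainable parameters are assumed to be 16-bit (16/8 = 2 bytes each), scaled by a factor of 4 for gradients and optimizer state, with a 20% overhead applied to the total. The sketch below is a minimal illustration of that calculation; the function name and the example numbers (a 7B model at ~14 GB in fp16, 2% trainable parameters) are hypothetical and not taken from the app.

```python
# Minimal sketch of the LoRA fine-tuning memory estimate from src/app.py.
# Names and example values are illustrative assumptions.

def lora_finetuning_gb(total_size_gb: float, params_billion: float,
                       lora_pct: float) -> float:
    """Estimate LoRA fine-tuning memory in GB.

    Mirrors the diff's formula: trainable params at 16-bit (16/8 bytes),
    multiplied by 4 for gradients/optimizer state, plus a 1.2x overhead.
    """
    trainable_gb = params_billion * (lora_pct / 100) * (16 / 8) * 4
    return (total_size_gb + trainable_gb) * 1.2

# Hypothetical example: 7B parameters stored in fp16 (~14 GB), 2% trainable.
print(round(lora_finetuning_gb(total_size_gb=14.0, params_billion=7.0,
                               lora_pct=2.0), 2))  # ~18.14 GB
```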