kouki321 committed on
Commit c23c96d · verified · 1 parent: c6bbadb

Update app.py

Files changed (1): app.py (+1 −0)
app.py CHANGED
@@ -3,6 +3,7 @@ import pandas as pd
  import streamlit as st
  from time import time
  from transformers import AutoTokenizer, AutoModelForCausalLM
+ from transformers.cache_utils import DynamicCache

  def generate(model, input_ids, past_key_values, max_new_tokens=50):
      """Token-by-token generation using cache for speed."""