dwb2023 committed on
Commit
461b9e0
1 Parent(s): 273f115

Update utils.py

Browse files

removing lru_cache

Files changed (1) hide show
  1. utils.py +0 -2
utils.py CHANGED
@@ -2,7 +2,6 @@ import subprocess
2
  import os
3
  import torch
4
  from transformers import BitsAndBytesConfig, AutoConfig, AutoModelForCausalLM, LlavaNextForConditionalGeneration, LlavaForConditionalGeneration, PaliGemmaForConditionalGeneration, Idefics2ForConditionalGeneration
5
- from functools import lru_cache
6
  import spaces
7
 
8
  os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"
@@ -26,7 +25,6 @@ ARCHITECTURE_MAP = {
26
 
27
  # Function to get the model summary with caching and GPU support
28
  @spaces.GPU
29
- @lru_cache(maxsize=10)
30
  def get_model_summary(model_name):
31
  """
32
  Retrieve the model summary for the given model name.
 
2
  import os
3
  import torch
4
  from transformers import BitsAndBytesConfig, AutoConfig, AutoModelForCausalLM, LlavaNextForConditionalGeneration, LlavaForConditionalGeneration, PaliGemmaForConditionalGeneration, Idefics2ForConditionalGeneration
 
5
  import spaces
6
 
7
  os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"
 
25
 
26
  # Function to get the model summary with caching and GPU support
27
  @spaces.GPU
 
28
  def get_model_summary(model_name):
29
  """
30
  Retrieve the model summary for the given model name.