xzl12306 committed on
Commit
6d74e51
1 Parent(s): 5fb9fdb

fix spaces

Browse files
app.py CHANGED
@@ -32,6 +32,7 @@ from tinychart.eval.eval_metric import parse_model_output, evaluate_cmds
32
 
33
  from transformers import TextIteratorStreamer
34
  from pathlib import Path
 
35
 
36
  DEFAULT_MODEL_PATH = "mPLUG/TinyChart-3B-768"
37
  DEFAULT_MODEL_NAME = "TinyChart-3B-768"
@@ -106,6 +107,7 @@ def is_float(value):
106
 
107
 
108
  @torch.inference_mode()
 
109
  def get_response(params):
110
  prompt = params["prompt"]
111
  ori_prompt = prompt
 
32
 
33
  from transformers import TextIteratorStreamer
34
  from pathlib import Path
35
+ import spaces
36
 
37
  DEFAULT_MODEL_PATH = "mPLUG/TinyChart-3B-768"
38
  DEFAULT_MODEL_NAME = "TinyChart-3B-768"
 
107
 
108
 
109
  @torch.inference_mode()
110
+ @spaces.GPU
111
  def get_response(params):
112
  prompt = params["prompt"]
113
  ori_prompt = prompt
tinychart/model/language_model/llava_phi.py CHANGED
@@ -29,7 +29,6 @@ from transformers.modeling_outputs import CausalLMOutputWithPast
29
 
30
  from tinychart.model.llava_arch import LlavaMetaModel, LlavaMetaForCausalLM
31
  from tinychart.model.model_factory import *
32
- import spaces
33
 
34
  class TinyChartPhiConfig(PhiConfig):
35
  model_type = "tiny_chart_phi"
@@ -104,7 +103,6 @@ class TinyChartPhiForCausalLM(PhiForCausalLM, LlavaMetaForCausalLM):
104
  )
105
 
106
  @torch.no_grad()
107
- @spaces.GPU
108
  def generate(
109
  self,
110
  inputs: Optional[torch.Tensor] = None,
 
29
 
30
  from tinychart.model.llava_arch import LlavaMetaModel, LlavaMetaForCausalLM
31
  from tinychart.model.model_factory import *
 
32
 
33
  class TinyChartPhiConfig(PhiConfig):
34
  model_type = "tiny_chart_phi"
 
103
  )
104
 
105
  @torch.no_grad()
 
106
  def generate(
107
  self,
108
  inputs: Optional[torch.Tensor] = None,