m7mdal7aj committed
Commit 0b0cffd
1 Parent(s): 6a15fc4

Update my_model/tabs/run_inference.py

Files changed (1)
  1. my_model/tabs/run_inference.py +7 -6
my_model/tabs/run_inference.py CHANGED
@@ -48,8 +48,9 @@ class InferenceRunner(StateManager):
         """
         free_gpu_resources()
         answer = model.generate_answer(question, caption, detected_objects_str)
+        prompt_length = model.current_prompt_length
         free_gpu_resources()
-        return answer
+        return answer, prompt_length


    def image_qa_app(self, kbvqa):
@@ -120,13 +121,13 @@ class InferenceRunner(StateManager):
            else:
                if nested_col22.button('Get Answer', key=f'answer_{image_key}', disabled=self.is_widget_disabled):

-                    answer = self.answer_question(image_data['caption'], image_data['detected_objects_str'], question, kbvqa)
+                    answer, prompt_length = self.answer_question(image_data['caption'], image_data['detected_objects_str'], question, kbvqa)
                    st.session_state['loading_in_progress'] = False
-                    self.add_to_qa_history(image_key, question, answer)
+                    self.add_to_qa_history(image_key, question, answer, prompt_length)

-            # Display Q&A history for each image
-            for num, (q, a) in enumerate(qa_history):
-                nested_col22.text(f"Q{num+1}: {q}\nA{num+1}: {a}\n")
+            # Display Q&A history and prompt lengths for each image
+            for num, (q, a, p) in enumerate(qa_history):
+                nested_col22.text(f"Q{num+1}: {q}\nA{num+1}: {a}\nPrompt Length: {p}\n")


    def run_inference(self):
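
The commit threads the prompt length from the model through to the Q&A history display: answer_question now returns (answer, prompt_length), the history entry gains a third element, and the display loop prints it. A minimal sketch of that flow, using a hypothetical DummyModel stand-in (the real app uses the KBVQA model, Streamlit widgets, and free_gpu_resources(), none of which are reproduced here):

class DummyModel:
    """Hypothetical stand-in for the KB-VQA model in run_inference.py."""

    def generate_answer(self, question, caption, detected_objects_str):
        prompt = f"{caption}\n{detected_objects_str}\n{question}"
        # Record the prompt length so it can be read back afterwards,
        # the way the commit reads model.current_prompt_length.
        self.current_prompt_length = len(prompt.split())
        return "a placeholder answer"


def answer_question(model, caption, detected_objects_str, question):
    # Mirrors the updated method: return the answer together with the
    # prompt length captured from the model after generation.
    answer = model.generate_answer(question, caption, detected_objects_str)
    prompt_length = model.current_prompt_length
    return answer, prompt_length


qa_history = []  # each entry is now a (question, answer, prompt_length) tuple

model = DummyModel()
answer, prompt_length = answer_question(
    model, "a cat on a sofa", "cat, sofa", "What is on the sofa?"
)
qa_history.append(("What is on the sofa?", answer, prompt_length))

for num, (q, a, p) in enumerate(qa_history):
    print(f"Q{num + 1}: {q}\nA{num + 1}: {a}\nPrompt Length: {p}\n")

Any code that reads the history (add_to_qa_history and the display loop in image_qa_app) must agree on the three-element tuple shape, which is what the second hunk updates.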