mikeboone (with Claude Sonnet 4.6) committed
Commit 39bd236 · 1 Parent(s): 7a5e60f

fix: add prompt logging to both spotter viz story generators


The AI and Matrix story generators were calling make_request() without
logging to the prompt log, so the Prompt Log tab showed nothing for
those LLM calls.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
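
The fix imports log_researcher_call from prompt_logger, a module whose implementation is not part of this diff. Below is a minimal, hypothetical sketch of a compatible helper, inferred solely from the call sites in the hunks that follow; the PromptLogger class, its add() method, and the recorded fields are invented for illustration and are not the project's actual implementation.

# Hypothetical reconstruction of prompt_logger.log_researcher_call, inferred
# solely from the call sites in this commit. The real module is not shown
# here; PromptLogger and its add() method are invented for illustration.
from dataclasses import dataclass, field
from datetime import datetime, timezone
from typing import Any, Optional


@dataclass
class PromptLogger:
    """Illustrative stand-in for whatever object backs self._prompt_logger."""
    entries: list = field(default_factory=list)

    def add(self, entry: dict) -> None:
        self.entries.append(entry)


def log_researcher_call(
    tag: str,                # call-site label, e.g. "spotter_viz_story_ai"
    researcher: Any,         # the MultiLLMResearcher that made the request
    messages: list,          # chat messages passed to make_request()
    response: str,           # response text ("" when the call returned None)
    logger: Optional[PromptLogger] = None,
) -> None:
    """Record one LLM round trip so it can appear in the Prompt Log tab."""
    if logger is None:
        return  # no logger wired up; drop the record silently
    logger.add({
        "tag": tag,
        "provider": getattr(researcher, "provider", None),
        "model": getattr(researcher, "model", None),
        "messages": messages,
        "response": response,
        "timestamp": datetime.now(timezone.utc).isoformat(),
    })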

Files changed (1):
  1. chat_interface.py (+10 -2)
chat_interface.py CHANGED

@@ -2303,7 +2303,11 @@ To change settings, use:
             self.log_feedback(f"🎬 Generating AI Spotter Viz story ({llm_model})...")
             provider_name, model_name_str = map_llm_display_to_provider(llm_model)
             researcher = MultiLLMResearcher(provider=provider_name, model=model_name_str)
-            return researcher.make_request([{"role": "user", "content": prompt}], max_tokens=2000, temperature=0.7)
+            messages = [{"role": "user", "content": prompt}]
+            result = researcher.make_request(messages, max_tokens=2000, temperature=0.7)
+            from prompt_logger import log_researcher_call
+            log_researcher_call("spotter_viz_story_ai", researcher, messages, result or "", logger=self._prompt_logger)
+            return result
         except Exception as e:
             self.log_feedback(f"⚠️ AI Spotter Viz story generation failed: {e}")
             return f"*(Generation failed: {e})*"
@@ -2371,7 +2375,11 @@ To change settings, use:
             self.log_feedback(f"🎬 Generating Matrix Spotter Viz story ({llm_model})...")
             provider_name, model_name_str = map_llm_display_to_provider(llm_model)
             researcher = MultiLLMResearcher(provider=provider_name, model=model_name_str)
-            return researcher.make_request([{"role": "user", "content": prompt}], max_tokens=2000, temperature=0.6)
+            messages = [{"role": "user", "content": prompt}]
+            result = researcher.make_request(messages, max_tokens=2000, temperature=0.6)
+            from prompt_logger import log_researcher_call
+            log_researcher_call("spotter_viz_story_matrix", researcher, messages, result or "", logger=self._prompt_logger)
+            return result
         except Exception as e:
             self.log_feedback(f"⚠️ Matrix Spotter Viz story generation failed: {e}")
             return f"*(Generation failed: {e})*"