NavyDevilDoc committed on
Commit
0be55f7
·
verified ·
1 Parent(s): 1ca6074

Update src/app.py

Browse files
Files changed (1) hide show
  1. src/app.py +53 -7
src/app.py CHANGED
@@ -530,13 +530,60 @@ with tab3:
530
  sub = st.form_submit_button("Submit Answer")
531
 
532
  if sub and user_ans:
533
- with st.spinner("Grading..."):
534
  data = qs["question_data"]
535
- if data["type"] == "acronym": prompt = quiz.construct_acronym_grading_prompt(data["term"], data["correct_definition"], user_ans)
536
- else: prompt = quiz.construct_grading_prompt(qs["generated_question_text"], user_ans, data["context_text"])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
537
 
 
538
  msgs = [{"role": "user", "content": prompt}]
539
- grade, _ = query_model_universal(msgs, 500, model_choice, st.session_state.get("user_openai_key"))
 
 
 
540
  qs["feedback"] = grade
541
 
542
  # Update Streak
@@ -544,13 +591,12 @@ with tab3:
544
  if is_pass: qs["streak"] += 1
545
  elif "FAIL" in grade: qs["streak"] = 0
546
 
547
- # Save to History
548
- correct_info = data['correct_definition'] if data['type'] == 'acronym' else data['context_text']
549
  st.session_state.quiz_history.append({
550
  "question": qs["generated_question_text"],
551
  "user_answer": user_ans,
552
  "grade": "PASS" if is_pass else "FAIL",
553
- "context": correct_info
554
  })
555
 
556
  st.rerun()
 
530
  sub = st.form_submit_button("Submit Answer")
531
 
532
  if sub and user_ans:
533
+ with st.spinner("Consulting the Knowledge Base & Grading..."):
534
  data = qs["question_data"]
535
+
536
+ # Grading Logic Branch
537
+ if data["type"] == "acronym":
538
+ prompt = quiz.construct_acronym_grading_prompt(
539
+ data["term"], data["correct_definition"], user_ans
540
+ )
541
+ # For acronyms, the context is just the definition
542
+ final_context_for_history = data["correct_definition"]
543
+
544
+ else:
545
+ # --- RAG ENHANCEMENT START ---
546
+ # 1. Start with the original seed text
547
+ combined_context = f"--- PRIMARY SOURCE (SEED) ---\n{data['context_text']}\n\n"
548
+
549
+ # 2. Search Pinecone for 5 more relevant chunks using the generated question
550
+ # We check if index/model are active first
551
+ if st.session_state.active_index and st.session_state.get("active_embed_model"):
552
+ try:
553
+ # We search for K=10 and Rerank to Top 5 (or just take Top 5 if no reranker)
554
+ # Using the helper function from rag_engine
555
+ related_docs = rag_engine.search_knowledge_base(
556
+ query=qs["generated_question_text"],
557
+ username=st.session_state.username,
558
+ index_name=st.session_state.active_index,
559
+ embed_model_name=st.session_state.active_embed_model,
560
+ k=15, # Fetch a broad net
561
+ final_k=5 # Narrow down to Top 5 most relevant
562
+ )
563
+
564
+ if related_docs:
565
+ combined_context += "--- RELATED DOCUMENTATION (RETRIEVED) ---\n"
566
+ for i, doc in enumerate(related_docs):
567
+ combined_context += f"[Source {i+1}]: {doc.page_content}\n\n"
568
+ except Exception as e:
569
+ # If search fails, we just proceed with the primary source
570
+ print(f"Grading Search Failed: {e}")
571
+
572
+ # 3. Construct the Prompt with the super-context
573
+ prompt = quiz.construct_grading_prompt(
574
+ qs["generated_question_text"], user_ans, combined_context
575
+ )
576
+
577
+ # Save this rich context for the Study Guide
578
+ final_context_for_history = combined_context
579
+ # --- RAG ENHANCEMENT END ---
580
 
581
+ # Call LLM
582
  msgs = [{"role": "user", "content": prompt}]
583
+ grade, _ = query_model_universal(
584
+ msgs, 1000, model_choice, st.session_state.get("user_openai_key")
585
+ )
586
+
587
  qs["feedback"] = grade
588
 
589
  # Update Streak
 
591
  if is_pass: qs["streak"] += 1
592
  elif "FAIL" in grade: qs["streak"] = 0
593
 
594
+ # Save to History (Using the enhanced context!)
 
595
  st.session_state.quiz_history.append({
596
  "question": qs["generated_question_text"],
597
  "user_answer": user_ans,
598
  "grade": "PASS" if is_pass else "FAIL",
599
+ "context": final_context_for_history
600
  })
601
 
602
  st.rerun()