# customeragent-api / server / tests / final_stack_verify.py
# Clean deploy to Hugging Face (commit ac90985, author: anasraza526)
import asyncio
import logging
import spacy
from app.services.llm_service import get_llm_service
from app.services.spacy_medical_nlp import get_spacy_nlp
# Configure Logging
# Root logger at INFO so service-layer log lines (llm_service / spacy nlp)
# are visible alongside this script's print() output.
logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
# NOTE(review): `logger` is defined but the script reports via print();
# kept for parity with the other test scripts — confirm before removing.
logger = logging.getLogger("FINAL_VERIFY")
async def verify_stack():
    """Smoke-test the deployed stack end to end, printing a report.

    Two independent checks, each isolated in its own try/except so a
    failure in one does not stop the other:
      1. Medical NLP -- obtain the SpaCy service and extract entities
         from a sample clinical sentence (expects a SciSpaCy model such
         as en_core_sci_md, but tolerates any loaded model).
      2. Local LLM  -- send a short prompt through the LLM service and
         report whether a real response (not the apology fallback)
         came back.

    Returns:
        None. All results are printed to stdout.
    """
    print("\n" + "=" * 60)
    print("🚀 FINAL SYSTEM STACK VERIFICATION")
    print("=" * 60)

    # --- 1. NLP Verification (SciSpaCy) ---
    print("\n🔬 1. Verifying Medical NLP (SciSpaCy)...")
    try:
        nlp_service = get_spacy_nlp()
        # Heuristic: SciSpaCy models carry "sci" in their lang/name
        # metadata, so the concatenation catches either field.
        if nlp_service.nlp and "sci" in nlp_service.nlp.meta.get("lang", "") + nlp_service.nlp.meta.get("name", ""):
            print(" ✅ SciSpaCy Model Found: en_core_sci_md")
        else:
            print(f" ℹ️ Model loaded: {nlp_service.nlp.meta.get('name') if nlp_service.nlp else 'None'}")

        text = "Patient prescribed 100mg Aspirin for severe hypertension."
        entities = nlp_service.extract_medical_entities(text)
        if entities:
            print(f" ✅ Extraction Success: {entities}")
        else:
            print(" ⚠️ No entities extracted (Check model type)")
    except Exception as e:
        # Broad catch is deliberate: this is a diagnostic script and must
        # always fall through to the LLM check below.
        print(f" ❌ NLP Setup Failed: {e}")

    # --- 2. LLM Verification (TinyLlama) ---
    print("\n🤖 2. Verifying Local LLM (TinyLlama)...")
    try:
        llm = get_llm_service()
        prompt = "Explain fever in one sentence."
        print(f" 📤 Prompt: '{prompt}'")
        response = await llm.generate_response(prompt, system_prompt="You are a doctor.")
        print(f" 📥 Response: {response.strip()}")
        # The service's canned fallback message contains "apologize";
        # its presence means local inference did not actually run.
        if response and "apologize" not in response.lower():
            print(" ✅ Local Inference Success!")
        else:
            print(" ⚠️ Fallback Triggered (Check LLM logs)")
    except Exception as e:
        print(f" ❌ LLM Inference Failed: {e}")

    print("\n" + "=" * 60)
# Script entry point: drive the async verification on a fresh event loop.
if __name__ == "__main__":
    asyncio.run(verify_stack())