# QureadAI — tests/test_app_flows.py
import os
import pathlib
import sys
import json
import types
import unittest
import tempfile
import zipfile
from unittest.mock import patch
import numpy as np
def _install_gradio_stub() -> None:
if "gradio" in sys.modules:
return
module = types.ModuleType("gradio")
class _Event:
def then(self, *args, **kwargs):
return self
class _Component:
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def click(self, *args, **kwargs):
return _Event()
def change(self, *args, **kwargs):
return _Event()
class _Context:
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def __enter__(self):
return self
def __exit__(self, exc_type, exc, tb):
return False
class _Blocks(_Context):
def load(self, *args, **kwargs):
return _Event()
def launch(self, *args, **kwargs):
return None
class _Themes:
class Soft:
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
module.themes = _Themes()
module.Blocks = _Blocks
module.Row = _Context
module.Column = _Context
module.Group = _Context
module.Tabs = _Context
module.Tab = _Context
module.Accordion = _Context
module.State = _Component
module.Markdown = _Component
module.Slider = _Component
module.Button = _Component
module.Dropdown = _Component
module.Checkbox = _Component
module.HTML = _Component
module.Code = _Component
module.Dataframe = _Component
module.Textbox = _Component
module.File = _Component
module.DownloadButton = _Component
module.Plot = _Component
sys.modules["gradio"] = module
def _install_openai_stub() -> None:
if "openai" in sys.modules:
return
module = types.ModuleType("openai")
class _Completions:
def create(self, *args, **kwargs):
raise RuntimeError("OpenAI call is stubbed in tests.")
class _Chat:
def __init__(self):
self.completions = _Completions()
class OpenAI:
def __init__(self, *args, **kwargs):
self.chat = _Chat()
module.OpenAI = OpenAI
sys.modules["openai"] = module
def _install_export_pdf_stub() -> None:
if "quread.export_pdf" in sys.modules:
return
module = types.ModuleType("quread.export_pdf")
def md_to_pdf(markdown_text: str, output_path: str):
pathlib.Path(output_path).write_text(markdown_text or "", encoding="utf-8")
module.md_to_pdf = md_to_pdf
sys.modules["quread.export_pdf"] = module
# Install all stubs BEFORE the ``import app`` below: app's module-level
# imports of gradio, openai, and quread.export_pdf must resolve to the fakes.
_install_gradio_stub()
_install_openai_stub()
_install_export_pdf_stub()
import app
from quread.engine import QuantumStateVector
from quread.def_translator import DEFGridConfig, build_def_blockages, to_def_blockages_fragment
# Heat-map metric name passed to most app entry points in these tests.
DEFAULT_HEAT_METRIC = "activity_count"

# Shared positional severity-configuration arguments splatted into many app
# calls below. Positional order must match app's severity parameters —
# presumably: mode, source metric, two thresholds, two limits, five weights,
# and a lock-to-heatmap-metric flag (cf. the keyword call in
# test_severity_lock_uses_heat_metric_source) — TODO confirm against app.
DEFAULT_SEVERITY_ARGS = (
    "linear",
    "composite_risk",
    0.45,
    0.70,
    67.0,
    90.0,
    0.25,
    0.20,
    0.15,
    0.25,
    0.15,
    False,
)
def _qec_manifest(num_samples_ai=10000):
return {
"bundle_version": "1.0",
"source": "nvidia_ising_decoding",
"code_family": "surface_code",
"experiment_name": "surface_d13_demo",
"distance": 13,
"n_rounds": 104,
"basis": "X",
"rotation": "O1",
"noise_model_label": "public_default",
"generated_by": "tests",
"timestamp": "2026-04-15T10:00:00Z",
"notes": "test bundle",
"model": {
"variant": "fast",
"model_id": 1,
"checkpoint_name": "Ising-Decoder-SurfaceCode-1-Fast.pt",
},
"decoders": {
"baseline": {
"name": "pymatching",
"ler": 0.0123,
"latency_ms": 4.8,
"syndrome_density_before": 0.031,
"syndrome_density_after": 0.031,
"logical_failures": 123,
"num_samples": 10000,
},
"ai_predecoder_plus_baseline": {
"name": "ising_predecoder_plus_pymatching",
"ler": 0.0104,
"latency_ms": 3.2,
"syndrome_density_before": 0.031,
"syndrome_density_after": 0.011,
"logical_failures": 104,
"num_samples": int(num_samples_ai),
},
},
"artifacts": [
{"path": "artifacts/run.log", "kind": "log"},
{"path": "artifacts/notes.txt", "kind": "txt"},
],
}
def _write_qec_bundle(manifest=None, artifacts=None):
manifest = dict(manifest or _qec_manifest())
artifacts = dict(artifacts or {"artifacts/run.log": "decoder log", "artifacts/notes.txt": "notes"})
fd, path = tempfile.mkstemp(prefix="qec_bundle_", suffix=".zip")
os.close(fd)
with zipfile.ZipFile(path, "w", compression=zipfile.ZIP_DEFLATED) as zf:
zf.writestr("manifest.json", json.dumps(manifest))
for name, content in artifacts.items():
zf.writestr(name, content)
return path
class AppFlowsTest(unittest.TestCase):
    """End-to-end flow tests for the ``app`` module with stubbed dependencies.

    Gradio, OpenAI, and the PDF exporter are replaced by in-process stubs
    (installed above, before ``import app``), so these tests exercise the
    real application handlers without network access or a UI runtime.
    Many handlers take long positional argument lists; the shared tail of
    severity parameters comes from ``DEFAULT_SEVERITY_ARGS``.
    """

    def test_apply_selected_gate_recovers_from_missing_qc_state(self):
        # qc=None simulates lost session state; handler must rebuild a simulator.
        qc, last_counts, status = app.apply_selected_gate(None, None, "H", 0, 2)
        self.assertIsNotNone(qc)
        self.assertEqual(qc.n_qubits, 2)
        self.assertEqual(len(qc.history), 1)
        self.assertIsNone(last_counts)
        self.assertIn("Applied H on q0", status)

    def test_metrics_pipeline_recovers_from_missing_qc_state(self):
        metrics, meta, qubit_coords, layout_meta = app._current_metrics_and_layout(
            None,
            2,
            2,
            2,
            None,
            "",
            0.25,
            0.20,
            0.15,
            0.25,
            0.15,
            0.45,
            0.70,
        )
        self.assertIn("composite_risk", metrics)
        self.assertEqual(len(metrics["composite_risk"]), 2)
        self.assertEqual(qubit_coords[0], (0, 0))
        self.assertEqual(layout_meta["source"], "default")

    def test_qubit_count_change_reinitializes_simulator(self):
        qc, last_counts, selected_gate, _target, _control, _cnot_target, status = app._on_qubit_count_change(3)
        self.assertEqual(qc.n_qubits, 3)
        self.assertEqual(qc.history, [])
        self.assertIsNone(last_counts)
        self.assertEqual(selected_gate, "H")
        self.assertIn("Reinitialized simulator with 3 qubits", status)

    def test_write_tmp_generates_unique_paths(self):
        # Two writes of the same logical name must not collide on disk.
        p1 = app._write_tmp("circuit.qasm", "OPENQASM 2.0;")
        p2 = app._write_tmp("circuit.qasm", "OPENQASM 2.0;")
        try:
            self.assertNotEqual(p1, p2)
            self.assertTrue(pathlib.Path(p1).exists())
            self.assertTrue(pathlib.Path(p2).exists())
            self.assertEqual(pathlib.Path(p1).read_text(encoding="utf-8"), "OPENQASM 2.0;")
            self.assertEqual(pathlib.Path(p2).read_text(encoding="utf-8"), "OPENQASM 2.0;")
        finally:
            for path in (p1, p2):
                try:
                    os.remove(path)
                except FileNotFoundError:
                    pass

    def test_explain_reuse_preserves_previous_markdown(self):
        # Matching circuit hash means no new LLM call: previous text is reused.
        qc = QuantumStateVector(2)
        last_hash = app._circuit_hash(qc.history)
        shown, returned_hash, stored_md = app.explain_llm(
            qc=qc,
            n_qubits=2,
            shots=1024,
            last_hash=last_hash,
            previous_explanation="previous explanation",
        )
        self.assertEqual(returned_hash, last_hash)
        self.assertEqual(stored_md, "previous explanation")
        self.assertIn("Reusing previous explanation", shown)

    def test_explain_failure_preserves_previous_markdown(self):
        qc = QuantumStateVector(2)
        qc.apply_single("H", target=0)
        # Force the LLM call to fail; the handler must keep the old markdown.
        with patch.object(app, "explain_with_gpt4o", side_effect=RuntimeError("boom")):
            shown, returned_hash, stored_md = app.explain_llm(
                qc=qc,
                n_qubits=2,
                shots=1024,
                last_hash="",
                previous_explanation="previous explanation",
            )
        self.assertEqual(returned_hash, "")
        self.assertEqual(stored_md, "previous explanation")
        self.assertIn("Explanation request failed", shown)
        self.assertIn("Showing previous explanation", shown)

    def test_hotspot_rows_sorted_descending(self):
        # Qubit 1 has the highest composite_risk, so it must rank first.
        metrics = {
            "composite_risk": np.array([0.22, 0.91, 0.45], dtype=float),
            "hotspot_level": np.array([0, 2, 1], dtype=float),
            "activity_count": np.array([1.0, 4.0, 2.0], dtype=float),
            "gate_error": np.array([0.01, 0.04, 0.02], dtype=float),
            "readout_error": np.array([0.02, 0.05, 0.03], dtype=float),
            "state_fidelity": np.array([0.98, 0.82, 0.91], dtype=float),
            "process_fidelity": np.array([0.97, 0.79, 0.9], dtype=float),
            "coherence_health": np.array([0.8, 0.5, 0.7], dtype=float),
            "decoherence_risk": np.array([0.2, 0.6, 0.3], dtype=float),
            "fidelity": np.array([0.99, 0.95, 0.97], dtype=float),
        }
        rows = app._hotspot_rows(metrics, n_qubits=3, top_k=2)
        self.assertEqual(len(rows), 2)
        self.assertEqual(rows[0][0], 1)
        self.assertEqual(rows[0][1], "critical")
        self.assertGreaterEqual(rows[0][2], rows[1][2])

    def test_hotspot_rows_include_layout_coordinates(self):
        metrics = {
            "composite_risk": np.array([0.22, 0.91, 0.45], dtype=float),
            "hotspot_level": np.array([0, 2, 1], dtype=float),
            "activity_count": np.array([1.0, 4.0, 2.0], dtype=float),
            "gate_error": np.array([0.01, 0.04, 0.02], dtype=float),
            "readout_error": np.array([0.02, 0.05, 0.03], dtype=float),
            "state_fidelity": np.array([0.98, 0.82, 0.91], dtype=float),
            "process_fidelity": np.array([0.97, 0.79, 0.9], dtype=float),
            "coherence_health": np.array([0.8, 0.5, 0.7], dtype=float),
            "decoherence_risk": np.array([0.2, 0.6, 0.3], dtype=float),
            "fidelity": np.array([0.99, 0.95, 0.97], dtype=float),
        }
        rows = app._hotspot_rows(
            metrics,
            n_qubits=3,
            top_k=1,
            qubit_coords={1: (2, 3)},
        )
        # Last two columns carry the supplied (x, y) layout coordinates.
        self.assertEqual(rows[0][-2], 2)
        self.assertEqual(rows[0][-1], 3)

    def test_ideal_vs_noisy_plot_returns_figure(self):
        qc = QuantumStateVector(2)
        qc.apply_single("H", target=0)
        fig = app._ideal_vs_noisy_plot(
            qc=qc,
            shots=64,
            calibration_text='{"qubits":{"0":{"readout_error":0.1},"1":{"readout_error":0.1}}}',
            readout_scale=1.0,
            depolarizing_prob=0.1,
        )
        self.assertTrue(hasattr(fig, "axes"))
        self.assertGreaterEqual(len(fig.axes), 1)

    def test_heatmap_flow_returns_raw_vs_severity_comparison(self):
        qc = QuantumStateVector(2)
        qc.apply_single("H", target=0)
        # Calibration makes qubit 0 clearly noisier than qubit 1.
        calibration_text = (
            '{"qubits":{"0":{"gate_error":0.7,"readout_error":0.2,"t1_us":10,"t2_us":9,"fidelity":0.75},'
            '"1":{"gate_error":0.05,"readout_error":0.02,"t1_us":90,"t2_us":88,"fidelity":0.99}}}'
        )
        (
            heat_fig,
            comparison_status,
            comparison_fig,
            comparison_rows,
            hotspot_status,
            hotspot_rows,
            detail_md,
            detail_fig,
        ) = app._heat_and_hotspots_from_current(
            qc,
            2,
            2,
            2,
            "gate_error",
            "static",
            None,
            calibration_text,
            0.25,
            0.20,
            0.15,
            0.25,
            0.15,
            0.45,
            0.70,
            *DEFAULT_SEVERITY_ARGS,
            0.0,
            0,
            4,
        )
        self.assertTrue(hasattr(heat_fig, "axes"))
        self.assertTrue(hasattr(comparison_fig, "axes"))
        self.assertEqual(len(comparison_fig.axes), 6)
        self.assertIn("raw risk uses `gate_error`", comparison_status)
        self.assertGreaterEqual(len(comparison_rows), 1)
        self.assertEqual(comparison_rows[0][1], "gate_error")
        self.assertIn("Layout map:", hotspot_status)
        self.assertGreaterEqual(len(hotspot_rows), 1)
        self.assertIn("Hotspot Detail", detail_md)
        self.assertTrue(hasattr(detail_fig, "axes"))

    def test_severity_csv_export_returns_file(self):
        qc = QuantumStateVector(2)
        path = app._dl_severity_csv(
            qc,
            2,
            2,
            2,
            DEFAULT_HEAT_METRIC,
            None,
            "",
            0.25,
            0.20,
            0.15,
            0.25,
            0.15,
            0.45,
            0.70,
            *DEFAULT_SEVERITY_ARGS,
        )
        try:
            text = pathlib.Path(path).read_text(encoding="utf-8")
            # Only the CSV header is checked; row values depend on metrics.
            self.assertIn("severity_mode", text.splitlines()[0])
            self.assertIn("pnr_cost", text.splitlines()[0])
        finally:
            try:
                os.remove(path)
            except FileNotFoundError:
                pass

    def test_comparison_csv_export_returns_file(self):
        qc = QuantumStateVector(2)
        qc.apply_single("H", target=0)
        path = app._dl_comparison_csv(
            qc,
            2,
            2,
            2,
            "gate_error",
            None,
            "",
            0.25,
            0.20,
            0.15,
            0.25,
            0.15,
            0.45,
            0.70,
            *DEFAULT_SEVERITY_ARGS,
        )
        try:
            text = pathlib.Path(path).read_text(encoding="utf-8")
            self.assertIn("raw_metric", text.splitlines()[0])
            self.assertIn("abs_shift", text.splitlines()[0])
        finally:
            try:
                os.remove(path)
            except FileNotFoundError:
                pass

    def test_load_external_def_settings_from_lef_and_def(self):
        # Minimal LEF/DEF pair: 2000 DBU/micron, 0.19x1.40 CORE site,
        # rows at step 380 with 2800 DBU vertical pitch.
        lef_text = """
VERSION 5.8 ;
UNITS
DATABASE MICRONS 2000 ;
END UNITS
SITE CORE
CLASS CORE ;
SIZE 0.19 BY 1.40 ;
END CORE
"""
        def_text = """
VERSION 5.8 ;
UNITS DISTANCE MICRONS 2000 ;
DIEAREA ( 0 0 ) ( 3040 11200 ) ;
ROW ROW_0 CORE 0 0 N DO 16 BY 1 STEP 380 0 ;
ROW ROW_1 CORE 0 2800 FS DO 16 BY 1 STEP 380 0 ;
ROW ROW_2 CORE 0 5600 N DO 16 BY 1 STEP 380 0 ;
ROW ROW_3 CORE 0 8400 FS DO 16 BY 1 STEP 380 0 ;
"""
        lef_path = app._write_tmp("tech.tlef", lef_text)
        def_path = app._write_tmp("floorplan.def", def_text)
        try:
            result = app._load_external_def_settings(lef_path, def_path, 4, 4)
            # Result tuple: origin x/y, site width, row height, sites-per-cell,
            # rows-per-cell (as strings for the UI), then a status message.
            self.assertEqual(result[0], "0")
            self.assertEqual(result[1], "0")
            self.assertEqual(result[2], "380")
            self.assertEqual(result[3], "2800")
            self.assertEqual(result[4], "4")
            self.assertEqual(result[5], "1")
            self.assertIn("Imported DEF settings", result[6])
        finally:
            for path in (lef_path, def_path):
                try:
                    os.remove(path)
                except FileNotFoundError:
                    pass

    def test_preview_external_def_settings_returns_metadata_card(self):
        lef_text = """
VERSION 5.8 ;
UNITS
DATABASE MICRONS 2000 ;
END UNITS
SITE CORE
CLASS CORE ;
SIZE 0.19 BY 1.40 ;
END CORE
"""
        def_text = """
VERSION 5.8 ;
UNITS DISTANCE MICRONS 2000 ;
DIEAREA ( 0 0 ) ( 3040 11200 ) ;
ROW ROW_0 CORE 0 0 N DO 16 BY 1 STEP 380 0 ;
ROW ROW_1 CORE 0 2800 FS DO 16 BY 1 STEP 380 0 ;
ROW ROW_2 CORE 0 5600 N DO 16 BY 1 STEP 380 0 ;
ROW ROW_3 CORE 0 8400 FS DO 16 BY 1 STEP 380 0 ;
"""
        lef_path = app._write_tmp("tech.tlef", lef_text)
        def_path = app._write_tmp("floorplan.def", def_text)
        try:
            preview_state, preview_md, preview_rows, preview_fig, status = app._preview_external_def_settings(
                lef_path,
                def_path,
                4,
                4,
            )
            self.assertEqual(preview_state["site_width_dbu"], 380)
            self.assertEqual(preview_state["rows_per_cell_y"], 1)
            self.assertIn("Imported LEF/DEF Preview", preview_md)
            self.assertIn("Current heatmap grid overlay", preview_md)
            self.assertTrue(any(row[0] == "Matched site" and row[1] == "CORE" for row in preview_rows))
            self.assertTrue(hasattr(preview_fig, "axes"))
            self.assertIn("heatmap grid overlay", preview_fig.axes[0].get_title())
            # Overlay must actually draw grid lines, not just set a title.
            self.assertGreater(len(preview_fig.axes[0].lines), 0)
            self.assertIn("Preview ready", status)
        finally:
            for path in (lef_path, def_path):
                try:
                    os.remove(path)
                except FileNotFoundError:
                    pass

    def test_apply_previewed_def_settings_returns_control_values(self):
        preview_state = {
            "origin_x_dbu": 100,
            "origin_y_dbu": 200,
            "site_width_dbu": 380,
            "row_height_dbu": 2800,
            "sites_per_cell_x": 4,
            "rows_per_cell_y": 1,
        }
        result = app._apply_previewed_def_settings(preview_state)
        # Numeric state is echoed back as UI-ready strings plus a status message.
        self.assertEqual(result[:6], ("100", "200", "380", "2800", "4", "1"))
        self.assertIn("Applied previewed DEF settings", result[6])

    def test_severity_lock_uses_heat_metric_source(self):
        qc = QuantumStateVector(2)
        qc.apply_single("H", target=0)
        calibration_text = (
            '{"qubits":{"0":{"gate_error":0.85,"readout_error":0.02,"t1_us":80,"t2_us":60,"fidelity":0.99},'
            '"1":{"gate_error":0.05,"readout_error":0.02,"t1_us":80,"t2_us":60,"fidelity":0.40}}}'
        )
        severity_rows, _metrics, _meta, _coords, _layout_meta, severity_cfg = app._current_severity_rows(
            qc,
            2,
            2,
            2,
            None,
            calibration_text,
            0.25,
            0.20,
            0.15,
            0.25,
            0.15,
            0.45,
            0.70,
            "linear",
            "fidelity",
            0.45,
            0.70,
            67.0,
            90.0,
            0.25,
            0.20,
            0.15,
            0.25,
            0.15,
            heat_metric="gate_error",
            # Lock flag must override the "fidelity" source metric above.
            severity_lock_to_heatmap_metric=True,
        )
        self.assertEqual(severity_cfg.source_metric, "gate_error")
        self.assertEqual(severity_rows[0]["qubit"], 0)
        self.assertEqual(severity_rows[0]["source_metric"], "gate_error")

    def test_def_export_returns_fragment_file(self):
        qc = QuantumStateVector(2)
        qc.apply_single("H", target=0)
        calibration_text = (
            '{"qubits":{"0":{"gate_error":0.8,"readout_error":0.25,"t1_us":8,"t2_us":7,"fidelity":0.72},'
            '"1":{"gate_error":0.05,"readout_error":0.03,"t1_us":90,"t2_us":85,"fidelity":0.99}}}'
        )
        path = app._dl_def_blockages(
            qc,
            2,
            2,
            2,
            DEFAULT_HEAT_METRIC,
            None,
            calibration_text,
            0.25,
            0.20,
            0.15,
            0.25,
            0.15,
            0.45,
            0.70,
            *DEFAULT_SEVERITY_ARGS,
            # Trailing six strings are the DEF grid settings (origin, pitch, cell).
            "0",
            "0",
            "40",
            "20",
            "4",
            "2",
        )
        try:
            text = pathlib.Path(path).read_text(encoding="utf-8")
            self.assertIn("BLOCKAGES", text)
            self.assertIn("RECT", text)
        finally:
            try:
                os.remove(path)
            except FileNotFoundError:
                pass

    def test_combined_design_package_export_returns_zip(self):
        qc = QuantumStateVector(2)
        qc.apply_single("H", target=0)
        calibration_text = (
            '{"qubits":{"0":{"gate_error":0.8,"readout_error":0.25,"t1_us":8,"t2_us":7,"fidelity":0.72},'
            '"1":{"gate_error":0.05,"readout_error":0.03,"t1_us":90,"t2_us":85,"fidelity":0.99}}}'
        )
        lef_text = """
VERSION 5.8 ;
DATABASE MICRONS 1000 ;
SITE CORE
CLASS CORE ;
SIZE 0.20 BY 0.40 ;
END CORE
"""
        def_text = """
UNITS DISTANCE MICRONS 1000 ;
DIEAREA ( 0 0 ) ( 3200 3200 ) ;
ROW ROW0 CORE 0 0 N DO 16 BY 1 STEP 200 0 ;
ROW ROW1 CORE 0 400 FS DO 16 BY 1 STEP 200 0 ;
"""
        lef_path = app._write_tmp("tech.tlef", lef_text)
        def_path = app._write_tmp("design.def", def_text)
        path = None
        try:
            path = app._dl_design_package(
                qc,
                2,
                4,
                4,
                DEFAULT_HEAT_METRIC,
                None,
                calibration_text,
                0.25,
                0.20,
                0.15,
                0.25,
                0.15,
                0.45,
                0.70,
                *DEFAULT_SEVERITY_ARGS,
                0.0,
                8,
                "0",
                "0",
                "200",
                "400",
                "4",
                "4",
                lef_path,
                def_path,
            )
            self.assertTrue(pathlib.Path(path).exists())
            with zipfile.ZipFile(path, "r") as zf:
                names = set(zf.namelist())
                self.assertIn("quread_design_report.html", names)
                self.assertIn("validation_report.html", names)
                self.assertIn("validation_report.json", names)
                self.assertIn("quread_heatmap_abstract.lef", names)
                self.assertIn("quread_heatmap_annotations.lib", names)
                self.assertIn("quread_risk_blockages.def", names)
                self.assertIn("qubit_metrics.csv", names)
                self.assertIn("qubit_severity.csv", names)
                report = zf.read("quread_design_report.html").decode("utf-8")
                self.assertIn("Quread Combined Design Package", report)
                validation_html = zf.read("validation_report.html").decode("utf-8")
                self.assertIn("Quread Validation Report", validation_html)
                companion_lef = zf.read("quread_heatmap_abstract.lef").decode("utf-8")
                self.assertIn("MACRO QUREAD_Q0_", companion_lef)
                companion_lib = zf.read("quread_heatmap_annotations.lib").decode("utf-8")
                self.assertIn("library (quread_heatmap_companion)", companion_lib)
        finally:
            for candidate in (lef_path, def_path, path):
                if not candidate:
                    continue
                try:
                    os.remove(candidate)
                except FileNotFoundError:
                    pass

    def test_render_qec_bundle_returns_ready_status(self):
        bundle_path = _write_qec_bundle()
        try:
            state, status, summary_rows, fig, findings_md, comparison_rows, artifact_rows, preview_text = app._render_qec_bundle(
                bundle_path
            )
            self.assertIn("QEC bundle ready", status)
            self.assertEqual(state["manifest"]["code_family"], "surface_code")
            self.assertGreaterEqual(len(summary_rows), 1)
            self.assertEqual(len(comparison_rows), 2)
            self.assertGreaterEqual(len(artifact_rows), 2)
            self.assertIn("manifest.json", preview_text)
            self.assertIn("AI pre-decoding", findings_md)
            self.assertTrue(hasattr(fig, "axes"))
        finally:
            os.remove(bundle_path)

    def test_render_qec_bundle_invalid_returns_error(self):
        # A plain text file is not a zip: renderer must fail gracefully.
        bad_path = app._write_tmp("not_a_zip.txt", "not a zip")
        try:
            state, status, summary_rows, fig, findings_md, comparison_rows, artifact_rows, preview_text = app._render_qec_bundle(
                bad_path
            )
            self.assertEqual(state, {})
            self.assertIn("QEC bundle failed", status)
            self.assertEqual(summary_rows, [])
            self.assertEqual(comparison_rows, [])
            self.assertEqual(artifact_rows, [])
            self.assertIn("Bundle invalid", findings_md)
            self.assertEqual(preview_text, "")
            self.assertTrue(hasattr(fig, "axes"))
        finally:
            try:
                os.remove(bad_path)
            except FileNotFoundError:
                pass

    def test_qec_report_package_export_returns_zip(self):
        bundle_path = _write_qec_bundle()
        output_path = None
        try:
            state, _status, _summary_rows, _fig, _findings_md, _comparison_rows, _artifact_rows, _preview_text = app._render_qec_bundle(
                bundle_path
            )
            output_path = app._dl_qec_report_package(state)
            self.assertTrue(pathlib.Path(output_path).exists())
            with zipfile.ZipFile(output_path, "r") as zf:
                names = set(zf.namelist())
                self.assertIn("qec_decoder_report.html", names)
                self.assertIn("decoder_comparison.csv", names)
                self.assertIn("bundle_manifest_normalized.json", names)
                self.assertIn("original_input_bundle.zip", names)
                report = zf.read("qec_decoder_report.html").decode("utf-8")
                self.assertIn("QEC Decoder Report", report)
        finally:
            for candidate in (bundle_path, output_path):
                if not candidate:
                    continue
                try:
                    os.remove(candidate)
                except FileNotFoundError:
                    pass

    def test_sample_qec_bundle_download_returns_valid_zip(self):
        output_path = app._dl_sample_qec_bundle()
        try:
            self.assertTrue(pathlib.Path(output_path).exists())
            with zipfile.ZipFile(output_path, "r") as zf:
                names = set(zf.namelist())
                self.assertIn("manifest.json", names)
                self.assertIn("artifacts/run.log", names)
                self.assertIn("artifacts/model.onnx", names)
            # The sample bundle must round-trip through the app's own parser.
            parsed = app.parse_qec_bundle(output_path)
            self.assertEqual(parsed["manifest"]["code_family"], "surface_code")
            self.assertEqual(len(parsed["artifact_rows"]), 5)
        finally:
            try:
                os.remove(output_path)
            except FileNotFoundError:
                pass

    def test_render_validation_report_returns_summary_and_downloads(self):
        qc = QuantumStateVector(2)
        state, status, summary_md, focus_md, fig, artifact_rows, focus_rows, rows = app._render_validation_report(
            qc,
            2,
            2,
            2,
            DEFAULT_HEAT_METRIC,
            None,
            "",
            0.25,
            0.20,
            0.15,
            0.25,
            0.15,
            0.45,
            0.70,
            *DEFAULT_SEVERITY_ARGS,
            0.0,
            8,
            "0",
            "0",
            "100",
            "100",
            "10",
            "10",
            None,
            None,
        )
        self.assertIn("Validation complete", status)
        self.assertIn("Validation Summary", summary_md)
        self.assertIn("Mismatch Focus", focus_md)
        self.assertTrue(hasattr(fig, "axes"))
        self.assertGreaterEqual(len(artifact_rows), 1)
        self.assertIsInstance(focus_rows, list)
        self.assertGreaterEqual(len(rows), 1)
        self.assertIn("json", state)
        self.assertIn("html", state)
        # The same state must feed both download handlers.
        json_path = app._dl_validation_json(state)
        html_path = app._dl_validation_html(state)
        try:
            self.assertTrue(pathlib.Path(json_path).exists())
            self.assertTrue(pathlib.Path(html_path).exists())
            self.assertIn('"summary"', pathlib.Path(json_path).read_text(encoding="utf-8"))
            self.assertIn("Quread Validation Report", pathlib.Path(html_path).read_text(encoding="utf-8"))
        finally:
            for candidate in (json_path, html_path):
                try:
                    os.remove(candidate)
                except FileNotFoundError:
                    pass

    def test_render_eda_view_supports_current_mapping_def_preview(self):
        qc = QuantumStateVector(2)
        qc.apply_single("H", target=0)
        calibration_text = (
            '{"qubits":{"0":{"gate_error":0.8,"readout_error":0.25,"t1_us":8,"t2_us":7,"fidelity":0.72},'
            '"1":{"gate_error":0.05,"readout_error":0.03,"t1_us":90,"t2_us":85,"fidelity":0.99}}}'
        )
        status, mapping_rows, def_rows, fig, script = app._render_eda_view(
            qc,
            2,
            2,
            2,
            DEFAULT_HEAT_METRIC,
            None,
            calibration_text,
            0.25,
            0.20,
            0.15,
            0.25,
            0.15,
            0.45,
            0.70,
            *DEFAULT_SEVERITY_ARGS,
            "0",
            "0",
            "40",
            "20",
            "4",
            "2",
            "current_mapping",
            "def_blockages",
            None,
            "",
            16,
        )
        self.assertIn("source=def_blockages", status)
        self.assertEqual(mapping_rows, [])
        self.assertGreaterEqual(len(def_rows), 1)
        self.assertIn("BLOCKAGES", script)
        self.assertTrue(hasattr(fig, "axes"))

    def test_render_eda_view_supports_uploaded_def_fragments(self):
        # Build a DEF fragment directly from the translator, then feed it
        # back through the EDA view as an "uploaded_script" source.
        metrics = {
            "composite_risk": np.array([0.55, 0.88], dtype=float),
            "activity_norm": np.array([0.55, 0.88], dtype=float),
        }
        fragment = to_def_blockages_fragment(
            build_def_blockages(
                metrics,
                qubit_coords={0: (0, 0), 1: (0, 1)},
                grid_cfg=DEFGridConfig(0, 0, 40, 20, 4, 2),
                warning_threshold=0.45,
                critical_threshold=0.7,
            )
        )
        status, mapping_rows, def_rows, fig, script = app._render_eda_view(
            QuantumStateVector(2),
            2,
            2,
            2,
            DEFAULT_HEAT_METRIC,
            None,
            "",
            0.25,
            0.20,
            0.15,
            0.25,
            0.15,
            0.45,
            0.70,
            *DEFAULT_SEVERITY_ARGS,
            "0",
            "0",
            "40",
            "20",
            "4",
            "2",
            "uploaded_script",
            "def_blockages",
            None,
            fragment,
            16,
        )
        self.assertIn("source=def_blockages", status)
        self.assertEqual(mapping_rows, [])
        self.assertEqual(len(def_rows), 2)
        # The uploaded fragment must be echoed back unchanged.
        self.assertEqual(script.strip(), fragment.strip())
        self.assertTrue(hasattr(fig, "axes"))

    def test_render_eda_view_surfaces_invalid_def_grid(self):
        # Negative site width ("-1") must surface as an error, not a crash.
        status, mapping_rows, def_rows, _fig, _script = app._render_eda_view(
            QuantumStateVector(2),
            2,
            2,
            2,
            DEFAULT_HEAT_METRIC,
            None,
            "",
            0.25,
            0.20,
            0.15,
            0.25,
            0.15,
            0.45,
            0.70,
            *DEFAULT_SEVERITY_ARGS,
            "0",
            "0",
            "-1",
            "20",
            "4",
            "2",
            "current_mapping",
            "def_blockages",
            None,
            "",
            16,
        )
        self.assertIn("EDA view failed:", status)
        self.assertEqual(mapping_rows, [])
        self.assertEqual(def_rows, [])
if __name__ == "__main__":
    # Allow running this module directly: ``python tests/test_app_flows.py``.
    unittest.main()