# utils/fallback_suggester.py

import json

from sentence_transformers import SentenceTransformer, util

# 📌 Load pre-trained semantic similarity model
model = SentenceTransformer("sentence-transformers/paraphrase-mpnet-base-v2")

# 📌 Load fallback clause database
with open("fallback_clauses.json", "r", encoding="utf-8") as f:
    clause_bank = json.load(f)
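
# NOTE: fallback_clauses.json is assumed here (its contents are not shown) to be
# a flat mapping of clause label -> clause text; the entries below are purely
# illustrative, e.g.:
# {
#     "Limitation of Liability": "Liability is capped at the fees paid ...",
#     "Termination for Convenience": "Either party may terminate on 30 days' written notice ..."
# }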

# 📌 Extract clause labels and text
clause_labels = list(clause_bank.keys())
clause_texts = list(clause_bank.values())
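
# Pre-compute embeddings for every fallback clause once at import time, so each
# lookup only has to encode the incoming clause.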
clause_embeddings = model.encode(clause_texts, convert_to_tensor=True)


def suggest_fallback(input_clause: str, top_k: int = 3):
    """
    Suggest top-k fallback clauses based on semantic similarity.

    Args:
        input_clause (str): The clause to analyze.
        top_k (int): Number of fallback suggestions to return.

    Returns:
        str: Formatted fallback suggestions.
    """
    if not input_clause or len(input_clause.strip()) == 0:
        return "⚠️ No input clause provided."
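
    # Encode the input clause, score it against every stored fallback clause with
    # cosine similarity, and keep the indices of the top matches.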
    input_embedding = model.encode(input_clause, convert_to_tensor=True)
    scores = util.cos_sim(input_embedding, clause_embeddings)[0]
    top_indices = scores.topk(k=min(top_k, len(clause_labels))).indices.tolist()

    results = []
    for idx in top_indices:
        label = clause_labels[idx]
        suggestion = clause_texts[idx]
        results.append(f"🔹 {label}:\n{suggestion}")

    return "\n\n".join(results)
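

# Minimal usage sketch (assumptions: fallback_clauses.json sits in the working
# directory, and the sample clause below is purely hypothetical).
if __name__ == "__main__":
    sample_clause = (
        "The supplier shall have unlimited liability for any and all damages "
        "arising out of this agreement."
    )
    print(suggest_fallback(sample_clause, top_k=2))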