# mvp/utils/analysis.py
# Commit 22ecb24 ("Math: add functions")
# analysis.py
from utils.llm_client import llm_client, llm_so_client
from pydantic import BaseModel, Field
class Score(BaseModel):
    """Structured LLM response carrying a single integer score.

    Used as the ``responseModel`` for ``llm_so_client`` calls in this module.
    """

    # Both prompts ask for a score "from 0 to 100"; enforce that range so an
    # out-of-range LLM reply fails validation instead of propagating silently.
    score: int = Field(ge=0, le=100)
def assign_certainty_score(st):
    """Score each discrepancy's certainty (0-100) via the LLM and report it.

    Iterates over ``st.session_state.state.discrepancies``, asks the LLM how
    certain it is that each entry is a real issue, stores the structured
    result under ``item['certainty_score']``, and builds a markdown report of
    every prompt pair and resulting score.

    Args:
        st: the streamlit module; state is read from ``st.session_state``.

    Returns:
        str: markdown report starting with "## A. Certainty Score".
    """
    print(f"\nassign certainty score: {st.session_state.state.discrepancies}\n")
    text = "## A. Certainty Score\n"
    # enumerate(..., start=1) replaces the hand-rolled index counter.
    for index, item in enumerate(st.session_state.state.discrepancies, start=1):
        system_prompt = "You are a compliance analyst."
        user_messages = (
            f"Assess the following discrepancy and assign a certainty score from 0 to 100 indicating how certain you are that this is a real issue.\n\n"
            f"**Discrepancy:**\n{item['discrepancy']}\n\n"
            "Provide the certainty score as a single number."
        )
        # Structured-output call: responseModel=Score constrains the reply.
        item['certainty_score'] = llm_so_client(system_prompt, user_messages, responseModel=Score)
        # Per-item heading was missing here although the severity report has
        # one; added so every discrepancy is labeled consistently.
        text += f"### DISCREPANCY {index}\n"
        text += f"1. System prompt:\n```\n{system_prompt}\n```\n"
        text += f"2. User message:\n```\n{user_messages}\n```\n"
        text += f"3. Score:\n```\n {item['certainty_score']}\n```\n"
    return text
def assign_severity_score(st):
    """Score each discrepancy's severity (0-100) via the LLM and report it.

    Iterates over ``st.session_state.state.discrepancies``, asks the LLM how
    severe each discrepancy is, stores the structured result under
    ``item['severity_score']``, and builds a markdown report of every prompt
    pair and resulting score.

    Args:
        st: the streamlit module; state is read from ``st.session_state``.

    Returns:
        str: markdown report starting with "## B. Severity Score".
    """
    print(f"\nassign severity score: {st.session_state.state.discrepancies}\n")
    text = "## B. Severity Score\n"
    # enumerate(..., start=1) replaces the hand-rolled index counter.
    for index, item in enumerate(st.session_state.state.discrepancies, start=1):
        system_prompt = "You are a risk assessment expert."
        user_messages = (
            f"Given the following discrepancy, assign a severity score from 0 to 100 where 0 is not severe and 100 is very severe.\n\n"
            f"**Discrepancy:**\n{item['discrepancy']}\n\n"
            "Provide the severity score as a single number."
        )
        # Structured-output call: responseModel=Score constrains the reply.
        item['severity_score'] = llm_so_client(system_prompt, user_messages, responseModel=Score)
        text += f"### DISCREPANCY {index}\n"
        text += f"1. System prompt:\n```\n{system_prompt}\n```\n"
        text += f"2. User message:\n```\n{user_messages}\n```\n"
        # BUG FIX: the severity report previously printed item['certainty_score'];
        # it must show the severity score computed just above.
        text += f"3. Score:\n```\n {item['severity_score']}\n```\n"
    return text