import streamlit as st
from spacy import displacy
from PIL import Image
from pyvis.network import Network
import streamlit.components.v1 as components
from util.process_data import Entity, EntityType, Relation, Sample, SampleList
from util.tokenizer import Tokenizer
from model_inference import TransformersInference
from util.configuration import InferenceConfiguration
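# Inference configuration and the spaCy-based tokenizer are created once at startup.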
inference_config = InferenceConfiguration()
tokenizer = Tokenizer(inference_config.spacy_model)
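# Example paragraphs (German tax law excerpts from the EStG) offered as prefilled inputs below.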
SAMPLE_66 = "EStG § 66 Höhe des Kindergeldes, Zahlungszeitraum (1) Das Kindergeld beträgt monatlich für das erste und zweite Kind jeweils 219 Euro, für das dritte Kind 225 Euro und für das vierte und jedes weitere Kind jeweils 250 Euro."
SAMPLE_9 = "EStG § 9 Werbungskosten ... Zur Abgeltung dieser Aufwendungen ist für jeden Arbeitstag, an dem der Arbeitnehmer die erste Tätigkeitsstätte aufsucht eine Entfernungspauschale für jeden vollen Kilometer der Entfernung zwischen Wohnung und erster Tätigkeitsstätte von 0,30 Euro anzusetzen, höchstens jedoch 4 500 Euro im Kalenderjahr; ein höherer Betrag als 4 500 Euro ist anzusetzen, soweit der Arbeitnehmer einen eigenen oder ihm zur Nutzung überlassenen Kraftwagen benutzt."
############################################################
## Constants
############################################################
max_width_str = "max-width: 60%;"
paragraph = None
style = ""
graph_options = '''
var options = {
  "edges": {
    "arrows": {
      "to": {
        "enabled": true,
        "scaleFactor": 1.2
      }
    }
  }
}
'''
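# Legend rendered via displaCy's manual mode: the "text" lists all entity type
# names, and each "ents" span marks one name with its short label.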
legend_content = {
"text": "StatedKeyFigure StatedExpression Unit Range Factor Condition DeclarativeKeyFigure DeclarativeExpression",
"ents": [
{"start": 0, "end": 15, "label": "K"},
{"start": 16, "end": 32, "label": "E"},
{"start": 33, "end": 37, "label": "U"},
{"start": 38, "end": 43, "label": "R"},
{"start": 44, "end": 50, "label": "F"},
{"start": 51, "end": 60, "label": "C"},
{"start": 61, "end": 81, "label": "DK"},
{"start": 82, "end": 103, "label": "DE"},
]}
legend_options = {
    "ents": ["K", "U", "E", "R", "F", "C", "DK", "DE"],
    "colors": {"K": "#46d000", "U": "#e861ef", "E": "#538cff", "R": "#ffbe00",
               "F": "#0fd5dc", "C": "#ff484b", "DK": "#46d000", "DE": "#538cff"},
}
legend_mapping = {"StatedKeyFigure": "K", "Unit": "U", "StatedExpression": "E", "Range": "R",
                  "Factor": "F", "Condition": "C", "DeclarativeKeyFigure": "DK", "DeclarativeExpression": "DE"}
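# Edge colors for the pyvis relation graph, keyed by relation label.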
edge_colors = {"hasKeyFigure": "#46d000", "hasUnit": "#e861ef", "hasExpression": "#538cff", "hasRange": "#ffbe00",
               "hasFactor": "#0fd5dc", "hasCondition": "#ff484b", "join": "#aaa", "Typ": "#aaa", "hasParagraph": "#FF8B15"}
############################################################
## Function definitions
############################################################
def get_html(html: str, legend=False):
    """Convert HTML so it can be rendered."""
    # Wrapper markup is an assumption: a bordered, scrollable container for the
    # rendered entities and a plain container for the legend.
    WRAPPER = """<div style="overflow-x: auto; border: 1px solid #e6e9ef; border-radius: 0.25rem; padding: 1rem; margin-bottom: 2.5rem">{}</div>"""
    if legend:
        WRAPPER = """<div style="overflow-x: auto; margin-bottom: 1rem">{}</div>"""
    # Newlines seem to mess with the rendering
    html = html.replace("\n", " ")
    return WRAPPER.format(html)
def get_displacy_ent_obj(paragraph, bedingungen=False, send_request=False):
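    """Convert extracted entities into displaCy's manual render format.

    With bedingungen=True ("Bedingungen" = conditions) only Condition entities
    are kept, otherwise all non-Condition entities. When send_request is True
    the label is read from ent_type["label"] (model output), otherwise from
    the "entity" field.
    """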
entities = []
for entity in paragraph['entities']:
label = entity["entity"] if not send_request else entity["ent_type"]["label"]
        if (bedingungen and label == "Condition") or (not bedingungen and label != "Condition"):
entities.append({
'start': entity['start'],
'end': entity["end"],
'label': legend_mapping[label]
})
return [{'text': paragraph['text'], 'ents': entities}]
def request_extractor(text_data):
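    """Tokenize the input text and run model inference on it.

    Returns the single processed sample as a dict; on failure a sentinel
    sample with text "error" is returned so the UI can show an error message.
    """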
try:
data = SampleList(
samples=[
Sample(
idx=0,
text=str(text_data),
entities=[],
relations=[]
)
]
)
tokenizer.run(data)
model_inference = TransformersInference(inference_config)
model_inference.run_inference(data)
return data.dict()["samples"][0]
    except Exception:
        # On any failure return a sentinel sample; the caller shows an error message.
        return {"text": "error", "entities": [], "relations": []}
def generate_graph(nodes, edges, send_request=False):
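    """Build an interactive pyvis network from extracted entities (nodes) and relations (edges)."""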
net = Network(height="450px", width="100%")#, bgcolor="#222222", font_color="white", select_menu=True, filter_menu=True)
for node in nodes:
if "id" in node:
label = node["entity"] if not send_request else node["ent_type"]["label"]
node_color = legend_options["colors"][legend_mapping[label]]
node_label = node["text"] if len(node["text"]) < 30 else (node["text"][:27]+" ...")
            # Pin key-figure nodes (physics disabled) so they anchor the layout.
            if label in ["StatedKeyFigure", "DeclarativeKeyFigure"]:
net.add_node(node["id"], label=node_label, title=node["text"], mass=2, shape="ellipse", color=node_color, physics=False)
else:
net.add_node(node["id"], label=node_label, title=node["text"], mass=1, shape="ellipse", color=node_color)
for edge in edges:
label = edge["relation"] if not send_request else edge["rel_type"]["label"]
net.add_edge(edge["head"], edge["tail"], width=1, title=label, arrowStrikethrough=False, color=edge_colors[label])
# net.force_atlas_2based() # barnes_hut() force_atlas_2based() hrepulsion() repulsion()
net.toggle_physics(True)
net.set_edge_smooth("dynamic") # dynamic, continuous, discrete, diagonalCross, straightCross, horizontal, vertical, curvedCW, curvedCCW, cubicBezier
net.set_options(graph_options)
html_graph = net.generate_html()
return html_graph
############################################################
## Page configuration
############################################################
st.set_page_config(
page_title="NLP Gesetzestexte",
menu_items={
'Get Help': None,
'Report a bug': None,
'About': "## Demonstrator NLP"
}
    # layout="wide",
)
# Inject CSS limiting the width of the main content block (uses max_width_str
# above); the selector is an assumption and may differ across Streamlit versions.
st.markdown(
    f"""<style>.appview-container .main .block-container {{ {max_width_str} }}</style>""",
    unsafe_allow_html=True,
)
# Display radio options in a single row (assumed CSS matching this intent).
st.write('<style>div.row-widget.stRadio > div{flex-direction:row;}</style>', unsafe_allow_html=True)
############################################################
## Page formating
############################################################
col3, col4 = st.columns([2.4,1.6])
st.write('\n')
st.write('\n')
with col3:
st.subheader("Extraction of Key Figures from Tax Legal Texts")
    st.write("Demonstrator application for the paper 'Semantic Extraction of Key Figures and Their Properties From Tax Legal Texts using Neural Models', presented at the Sixth Workshop on Automated Semantic Analysis of Information in Legal Text (ASAIL 2023).")
st.write("The paper can be found here: [Paper](https://ceur-ws.org/Vol-3441/paper7.pdf)")
st.write('\n')
    st.write('This demonstrator processes a German tax law paragraph as input and outputs the extracted key figures with their properties and relations, based on the presented semantic model.')
with col4:
st.caption("Semantic Model")
image = Image.open('util/ontology.png')
st.image(image, width=350)
text_option = st.radio("Select Example", ["Insert your paragraph", "EStG § 66 Kindergeld", "EStG § 9 Werbungskosten"])
st.write('\n')
if text_option == "EStG § 66 Kindergeld":
text_area_input = st.text_area("Given paragraph", SAMPLE_66, height=200)
elif text_option == "EStG § 9 Werbungskosten":
text_area_input = st.text_area("Given paragraph", SAMPLE_9, height=200)
else:
text_area_input = st.text_area("Given paragraph", "", height=200)
if st.button("Start Extraction") and text_area_input != "":
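    # Run the extraction pipeline and render the legend, entities, conditions,
    # relation graph, and raw JSON.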
with st.spinner('Executing Extraction ...'):
paragraph = request_extractor(text_area_input)
if paragraph["text"] == "error":
st.error("Error while executing extraction.")
else:
legend = displacy.render([legend_content], style="ent", options=legend_options, manual=True)
st.write(f"{style}{get_html(legend, True)}", unsafe_allow_html=True)
st.caption("Entities:")
extracted_data = get_displacy_ent_obj(paragraph, False, True)
html = displacy.render(extracted_data, style="ent", options=legend_options, manual=True)
st.write(f"{style}{get_html(html)}", unsafe_allow_html=True)
st.write('\n')
st.caption("Conditions:")
extracted_data = get_displacy_ent_obj(paragraph, True, True)
html = displacy.render(extracted_data, style="ent", options=legend_options, manual=True)
st.write(f"{style}{get_html(html)}", unsafe_allow_html=True)
st.write('\n')
            st.caption("Relations:")
html_graph_req = generate_graph(paragraph["entities"], paragraph["relations"], send_request=True)
components.html(html_graph_req, height=500)
st.write('\n')
with st.expander("Show JSON"):
st.json(paragraph)