Upload 2 files

Files changed:
- pages/trace.py (+24 -0)
- streamapp.py (+24 -16)

pages/trace.py (ADDED)
@@ -0,0 +1,24 @@
+
+import streamlit as st
+
+#from .streamapp import trace_df
+
+print("trace_df ", st.session_state['trace_df'])
+
+trace_df = st.session_state['trace_df']
+print(list(trace_df))
+
+trace_df = trace_df.loc[:,['name', 'span_kind', 'start_time', 'end_time', 'attributes.__computed__.latency_ms', 'status_code', 'status_message', 'attributes.llm.invocation_parameters', 'attributes.llm.prompts', 'attributes.input.value', 'attributes.output.value', 'attributes.llm.prompt_template.template', 'attributes.llm.prompt_template.variables', 'attributes.llm.prompt_template.version', 'attributes.retrieval.documents']]
+trace_df = trace_df.sort_values(by='start_time', ascending = False)
+
+
+st.dataframe(trace_df)
+
+# if px.active_session():
+#     df0 = px.active_session().get_spans_dataframe()
+#     if not df0.empty:
+#         df = df0.fillna('')
+#         st.dataframe(df)
+
+
+#'name', 'span_kind', 'start_time', 'end_time', 'status_code', 'status_message', 'attributes.llm.invocation_parameters', 'attributes.llm.prompts', 'attributes.input.value', 'attributes.output.value', 'attributes.__computed__.latency_ms', 'attributes.llm.prompt_template.template', 'attributes.llm.prompt_template.variables', 'attributes.llm.prompt_template.version', 'attributes.retrieval.documents'
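Note that the new page assumes the main app has already stored a spans DataFrame under st.session_state['trace_df']; if the page is opened first, the bare lookup raises a KeyError, and any column missing from a given Phoenix version makes the .loc selection fail. A minimal defensive sketch of the same page logic (not part of the commit; the column names are taken from the diff above, and the shortened list is illustrative):

import streamlit as st

if 'trace_df' not in st.session_state:
    st.info("No trace data yet - run a query from the main app first.")
    st.stop()

trace_df = st.session_state['trace_df']

# Keep only the trace columns that actually exist in this Phoenix version.
wanted = ['name', 'span_kind', 'start_time', 'end_time',
          'attributes.__computed__.latency_ms', 'status_code', 'status_message',
          'attributes.input.value', 'attributes.output.value',
          'attributes.retrieval.documents']
available = [c for c in wanted if c in trace_df.columns]

st.dataframe(trace_df.loc[:, available].sort_values('start_time', ascending=False))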
streamapp.py (CHANGED)
@@ -32,17 +32,21 @@ import pandas as pd
 # from sklearn import datasets
 # from sklearn.ensemble import RandomForestClassifier
 
-
-session = px.launch_app()
-# If no exporter is specified, the tracer will export to the locally running Phoenix server
-tracer = OpenInferenceTracer()
-# If no tracer is specified, a tracer is constructed for you
-LangChainInstrumentor(tracer).instrument()
-print(session.url)
+global trace_df
 
+@st.cache_resource
+def tracer_config():
+    #phoenix setup
+    session = px.launch_app()
+    # If no exporter is specified, the tracer will export to the locally running Phoenix server
+    tracer = OpenInferenceTracer()
+    # If no tracer is specified, a tracer is constructed for you
+    LangChainInstrumentor(tracer).instrument()
+    print(session.url)
 
+tracer_config()
 
-tab1, tab2
+tab1, tab2 = st.tabs(["π RAG", "π FactVsHallucinate" ])
 
 
 
@@ -263,19 +267,23 @@ with tab2:
         if st.form_submit_button("Evaluate"):
            hallu_eval(question, answer, context)
 
-
 
-
+print("activ session: ", px.active_session().get_spans_dataframe())
+trace_df = px.active_session().get_spans_dataframe()
+
+st.session_state['trace_df'] = trace_df
+
+# with tab3:
 
 
 
-
+#     with st.form(" trace"):
 
-
-
-
-
-
+#         if px.active_session():
+#             df0 = px.active_session().get_spans_dataframe()
+#             if not df0.empty:
+#                 df = df0.fillna('')
+#                 st.dataframe(df)
 
 
 
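streamapp.py now wraps the Phoenix launch and LangChain instrumentation in @st.cache_resource, so the app is launched and instrumented once per Streamlit server process instead of on every script rerun. A minimal sketch of that pattern, assuming the same arize-phoenix API used in the diff (import paths vary between phoenix releases, so treat them as illustrative):

import streamlit as st
import phoenix as px
from phoenix.trace.langchain import LangChainInstrumentor, OpenInferenceTracer

@st.cache_resource  # executed once per server process, cached across reruns
def tracer_config():
    session = px.launch_app()                   # start the local Phoenix UI
    tracer = OpenInferenceTracer()              # exports to the local Phoenix server by default
    LangChainInstrumentor(tracer).instrument()  # make LangChain runs emit spans
    return session

session = tracer_config()
st.caption(f"Phoenix UI: {session.url}")

Returning the session (rather than only printing session.url, as the diff does) keeps the cached handle available for later use, e.g. alongside px.active_session().get_spans_dataframe() in the evaluation tab.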