laverdes committed
Commit d391513 • 1 Parent(s): c308272

feat: app version 1.0

README.md CHANGED
@@ -1,7 +1,7 @@
  ---
- title: Ts Explorations
- emoji: 🌖
- colorFrom: purple
+ title: Time Series Explore
+ emoji: 👀
+ colorFrom: red
  colorTo: blue
  sdk: streamlit
  sdk_version: 1.15.2
app.py ADDED
@@ -0,0 +1,84 @@
+ import streamlit as st
+ import pathlib
+ import json
+ import pandas as pd
+
+ st.header("Time Series Preprocessing Pipeline")
+ st.markdown("Users can load their time-series data and select a set of transformations to prepare a training set for univariate or multivariate time-series classification. \
+ Go ahead and use the sidebar on the left to upload your data files in *.json* format and start exploring and transforming it!")
+ col1, col2 = st.columns(2)
+
+ file_names, file_bytes = [], []
+ with st.sidebar:
+     files = st.file_uploader("Load files", accept_multiple_files=True)
+     if files:
+         file_names = [file.name for file in files]
+         file_bytes = [file.getvalue() for file in files]
+         st.text("\n".join(file_names))
+
+ data_dict = {'trial_id': [], 'pupil_dilation': [], 'baseline': [], 'rating': []}
+ with st.spinner("building base dictionary..."):
+     for file_data in file_bytes:
+         data = json.loads(file_data)
+         for key in data:
+             for record in data[key]:
+                 for field, value in record.items():
+                     data_dict[field].append(value)
+
+ df_base = pd.DataFrame()  # placeholder; filled once files are uploaded
+ with col1:
+     if file_bytes:
+         with st.spinner("building base dataframe..."):
+             df_base = pd.DataFrame.from_dict(data_dict)
+             df_base["trial_id"] = df_base.trial_id.map(lambda s: "".join([c for c in s if c.isdigit()]))
+             df_base["len_pupil_dilation"] = df_base.pupil_dilation.map(len)
+             df_base["len_baseline"] = df_base.baseline.map(len)
+         st.info(f"number of files: {len(file_names)}")
+         st.markdown("Your original data")
+         st.dataframe(df_base)
+     else:
+         st.caption("Upload your data from the sidebar to start :sunglasses:")
+
+ with col2:
+     if not df_base.empty:
+         st.markdown("**Cleaning actions**")
+         detect_blinking = st.button("Detect blinking ('0.0' values)")
+         number_of_blinks = 0
+         if detect_blinking:
+             # initialize session_state with the base dataframe
+             if 'df' not in st.session_state:
+                 st.session_state['df'] = df_base
+             for ser in df_base['pupil_dilation']:
+                 for f in ser:
+                     if f == 0.0:
+                         number_of_blinks += 1
+
+             for ser in df_base['baseline']:
+                 for f in ser:
+                     if f == 0.0:
+                         number_of_blinks += 1
+             # initialize session_state with the blink count
+             if 'blinks' not in st.session_state:
+                 st.session_state['blinks'] = number_of_blinks
+
+         if "blinks" in st.session_state.keys():
+             st.info(f"blinking values (0.0) were found in {number_of_blinks} time-steps in all your data")
+             remove_blinking = st.button("Remove blinking")
+             # transformed dataframe shown in column 2
+             if remove_blinking:
+                 df_right = st.session_state.df.copy(deep=True)
+                 df_right.pupil_dilation = df_right.pupil_dilation.map(lambda ser: [f for f in ser if f != 0.0])
+                 df_right.baseline = df_right.baseline.map(lambda ser: [f for f in ser if f != 0.0])
+                 st.success("blinking values have been removed!")
+                 st.info("after transformation")
+                 st.dataframe(df_right)
+         elif detect_blinking and not number_of_blinks:
+             st.caption("no blinking values were found in your data!")
+
+     if not df_base.empty:
+         st.warning("consider running outlier detection to clean your data!", icon="⚠️")
+
+     # for key, value in st.session_state.items():
+     #     st.success(f"{key}: {value}")
+
+ # reloading new samples does not refresh st.session_state: these values are only written once
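
For reference, here is a minimal sketch of an input file the uploader above should accept, inferred from the parsing loop in app.py: a JSON object whose values are lists of trial records carrying the fields 'trial_id', 'pupil_dilation', 'baseline', and 'rating'. The top-level key name, the file name sample_trials.json, and the concrete values are illustrative assumptions, not part of this commit.

    import json

    # hypothetical sample payload matching the fields expected by data_dict in app.py
    sample = {
        "participant_1": [                          # assumed top-level grouping key
            {
                "trial_id": "trial_001",            # app.py keeps only the digits
                "pupil_dilation": [3.1, 0.0, 3.4],  # 0.0 entries are counted as blinks
                "baseline": [3.0, 2.9, 0.0],
                "rating": 5,                        # assumed to be a scalar label
            },
            {
                "trial_id": "trial_002",
                "pupil_dilation": [2.8, 2.7, 2.9],
                "baseline": [2.6, 2.7, 2.8],
                "rating": 3,
            },
        ]
    }

    # write a file that can be uploaded through the sidebar
    with open("sample_trials.json", "w") as f:
        json.dump(sample, f, indent=2)

Note that each record should contain only these four fields, since the parsing loop appends every record key to data_dict and an unexpected field name would raise a KeyError. As the trailing comment in app.py points out, st.session_state entries are written only once; re-uploading new files will not refresh them unless the keys are deleted first (for example, del st.session_state['df']).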
pages/1_📈_Plotting.py ADDED
@@ -0,0 +1,3 @@
+ import streamlit as st
+
+ st.warning("We are working on this functionality!")
pages/2_📊_Outlier_Detection.py ADDED
@@ -0,0 +1,3 @@
+ import streamlit as st
+
+ st.warning("We are working on this functionality. It will soon be ready!")
pages/3_🏋️_Training.py ADDED
@@ -0,0 +1,3 @@
+ import streamlit as st
+
+ st.warning("We are working on this functionality! 🏋️")