Alexander Seifert committed on
Commit
7a75a86
1 Parent(s): 9556889

turn into module for docs

Browse files
README.md CHANGED
@@ -6,7 +6,7 @@ colorTo: indigo
6
  python_version: 3.9
7
  sdk: streamlit
8
  sdk_version: 1.10.0
9
- app_file: src/main.py
10
  pinned: true
11
  ---
12
 
6
  python_version: 3.9
7
  sdk: streamlit
8
  sdk_version: 1.10.0
9
+ app_file: src/app.py
10
  pinned: true
11
  ---
12
 
src/__init__.py ADDED
File without changes
src/{main.py → app.py} RENAMED
@@ -2,8 +2,8 @@ import pandas as pd
2
  import streamlit as st
3
  from streamlit_option_menu import option_menu
4
 
5
- from load import load_context
6
- from subpages import (
7
  DebugPage,
8
  FindDuplicatesPage,
9
  HomePage,
@@ -16,10 +16,10 @@ from subpages import (
16
  RandomSamplesPage,
17
  RawDataPage,
18
  )
19
- from subpages.attention import AttentionPage
20
- from subpages.hidden_states import HiddenStatesPage
21
- from subpages.inspect import InspectPage
22
- from utils import classmap
23
 
24
  sts = st.sidebar
25
  st.set_page_config(
2
  import streamlit as st
3
  from streamlit_option_menu import option_menu
4
 
5
+ from src.load import load_context
6
+ from src.subpages import (
7
  DebugPage,
8
  FindDuplicatesPage,
9
  HomePage,
16
  RandomSamplesPage,
17
  RawDataPage,
18
  )
19
+ from src.subpages.attention import AttentionPage
20
+ from src.subpages.hidden_states import HiddenStatesPage
21
+ from src.subpages.inspect import InspectPage
22
+ from src.utils import classmap
23
 
24
  sts = st.sidebar
25
  st.set_page_config(
src/data.py CHANGED
@@ -7,7 +7,7 @@ from datasets import Dataset, DatasetDict, load_dataset # type: ignore
7
  from torch.nn.functional import cross_entropy
8
  from transformers import DataCollatorForTokenClassification # type: ignore
9
 
10
- from utils import device, tokenizer_hash_funcs
11
 
12
 
13
  @st.cache(allow_output_mutation=True)
7
  from torch.nn.functional import cross_entropy
8
  from transformers import DataCollatorForTokenClassification # type: ignore
9
 
10
+ from src.utils import device, tokenizer_hash_funcs
11
 
12
 
13
  @st.cache(allow_output_mutation=True)
src/load.py CHANGED
@@ -4,10 +4,10 @@ import pandas as pd
4
  import streamlit as st
5
  from datasets import Dataset # type: ignore
6
 
7
- from data import encode_dataset, get_collator, get_data, get_split_df
8
- from model import get_encoder, get_model, get_tokenizer
9
- from subpages import Context
10
- from utils import align_sample, device, explode_df
11
 
12
  _TOKENIZER_NAME = (
13
  "xlm-roberta-base",
4
  import streamlit as st
5
  from datasets import Dataset # type: ignore
6
 
7
+ from src.data import encode_dataset, get_collator, get_data, get_split_df
8
+ from src.model import get_encoder, get_model, get_tokenizer
9
+ from src.subpages import Context
10
+ from src.utils import align_sample, device, explode_df
11
 
12
  _TOKENIZER_NAME = (
13
  "xlm-roberta-base",
src/subpages/__init__.py CHANGED
@@ -1,14 +1,14 @@
1
- from subpages.attention import AttentionPage
2
- from subpages.debug import DebugPage
3
- from subpages.find_duplicates import FindDuplicatesPage
4
- from subpages.hidden_states import HiddenStatesPage
5
- from subpages.home import HomePage
6
- from subpages.inspect import InspectPage
7
- from subpages.losses import LossesPage
8
- from subpages.lossy_samples import LossySamplesPage
9
- from subpages.metrics import MetricsPage
10
- from subpages.misclassified import MisclassifiedPage
11
- from subpages.page import Context, Page
12
- from subpages.probing import ProbingPage
13
- from subpages.random_samples import RandomSamplesPage
14
- from subpages.raw_data import RawDataPage
1
+ from src.subpages.attention import AttentionPage
2
+ from src.subpages.debug import DebugPage
3
+ from src.subpages.find_duplicates import FindDuplicatesPage
4
+ from src.subpages.hidden_states import HiddenStatesPage
5
+ from src.subpages.home import HomePage
6
+ from src.subpages.inspect import InspectPage
7
+ from src.subpages.losses import LossesPage
8
+ from src.subpages.lossy_samples import LossySamplesPage
9
+ from src.subpages.metrics import MetricsPage
10
+ from src.subpages.misclassified import MisclassifiedPage
11
+ from src.subpages.page import Context, Page
12
+ from src.subpages.probing import ProbingPage
13
+ from src.subpages.random_samples import RandomSamplesPage
14
+ from src.subpages.raw_data import RawDataPage
src/subpages/attention.py CHANGED
@@ -2,7 +2,7 @@ import ecco
2
  import streamlit as st
3
  from streamlit.components.v1 import html
4
 
5
- from subpages.page import Context, Page # type: ignore
6
 
7
  SETUP_HTML = """
8
  <script src="https://requirejs.org/docs/release/2.3.6/minified/require.js"></script>
2
  import streamlit as st
3
  from streamlit.components.v1 import html
4
 
5
+ from src.subpages.page import Context, Page # type: ignore
6
 
7
  SETUP_HTML = """
8
  <script src="https://requirejs.org/docs/release/2.3.6/minified/require.js"></script>
src/subpages/debug.py CHANGED
@@ -1,7 +1,7 @@
1
  import streamlit as st
2
  from pip._internal.operations import freeze
3
 
4
- from subpages.page import Context, Page
5
 
6
 
7
  class DebugPage(Page):
1
  import streamlit as st
2
  from pip._internal.operations import freeze
3
 
4
+ from src.subpages.page import Context, Page
5
 
6
 
7
  class DebugPage(Page):
src/subpages/faiss.py CHANGED
@@ -1,8 +1,8 @@
1
  import streamlit as st
2
  from datasets import Dataset
3
 
4
- from subpages.page import Context, Page # type: ignore
5
- from utils import device, explode_df, htmlify_labeled_example, tag_text
6
 
7
 
8
  class FaissPage(Page):
1
  import streamlit as st
2
  from datasets import Dataset
3
 
4
+ from src.subpages.page import Context, Page # type: ignore
5
+ from src.utils import device, explode_df, htmlify_labeled_example, tag_text
6
 
7
 
8
  class FaissPage(Page):
src/subpages/find_duplicates.py CHANGED
@@ -1,7 +1,7 @@
1
  import streamlit as st
2
  from sentence_transformers.util import cos_sim
3
 
4
- from subpages.page import Context, Page
5
 
6
 
7
  @st.cache()
1
  import streamlit as st
2
  from sentence_transformers.util import cos_sim
3
 
4
+ from src.subpages.page import Context, Page
5
 
6
 
7
  @st.cache()
src/subpages/hidden_states.py CHANGED
@@ -3,7 +3,7 @@ import plotly.express as px
3
  import plotly.graph_objects as go
4
  import streamlit as st
5
 
6
- from subpages.page import Context, Page
7
 
8
 
9
  @st.cache
3
  import plotly.graph_objects as go
4
  import streamlit as st
5
 
6
+ from src.subpages.page import Context, Page
7
 
8
 
9
  @st.cache
src/subpages/home.py CHANGED
@@ -4,9 +4,9 @@ from typing import Optional
4
 
5
  import streamlit as st
6
 
7
- from data import get_data
8
- from subpages.page import Context, Page
9
- from utils import PROJ, classmap, color_map_color
10
 
11
  _SENTENCE_ENCODER_MODEL = (
12
  "sentence-transformers/all-MiniLM-L6-v2",
4
 
5
  import streamlit as st
6
 
7
+ from src.data import get_data
8
+ from src.subpages.page import Context, Page
9
+ from src.utils import PROJ, classmap, color_map_color
10
 
11
  _SENTENCE_ENCODER_MODEL = (
12
  "sentence-transformers/all-MiniLM-L6-v2",
src/subpages/inspect.py CHANGED
@@ -1,7 +1,7 @@
1
  import streamlit as st
2
 
3
- from subpages.page import Context, Page
4
- from utils import aggrid_interactive_table, colorize_classes
5
 
6
 
7
  class InspectPage(Page):
1
  import streamlit as st
2
 
3
+ from src.subpages.page import Context, Page
4
+ from src.utils import aggrid_interactive_table, colorize_classes
5
 
6
 
7
  class InspectPage(Page):
src/subpages/losses.py CHANGED
@@ -1,7 +1,7 @@
1
  import streamlit as st
2
 
3
- from subpages.page import Context, Page
4
- from utils import AgGrid, aggrid_interactive_table
5
 
6
 
7
  @st.cache
1
  import streamlit as st
2
 
3
+ from src.subpages.page import Context, Page
4
+ from src.utils import AgGrid, aggrid_interactive_table
5
 
6
 
7
  @st.cache
src/subpages/lossy_samples.py CHANGED
@@ -1,8 +1,13 @@
1
  import pandas as pd
2
  import streamlit as st
3
 
4
- from subpages.page import Context, Page
5
- from utils import colorize_classes, get_bg_color, get_fg_color, htmlify_labeled_example
 
 
 
 
 
6
 
7
 
8
  class LossySamplesPage(Page):
1
  import pandas as pd
2
  import streamlit as st
3
 
4
+ from src.subpages.page import Context, Page
5
+ from src.utils import (
6
+ colorize_classes,
7
+ get_bg_color,
8
+ get_fg_color,
9
+ htmlify_labeled_example,
10
+ )
11
 
12
 
13
  class LossySamplesPage(Page):
src/subpages/metrics.py CHANGED
@@ -8,7 +8,7 @@ import streamlit as st
8
  from seqeval.metrics import classification_report
9
  from sklearn.metrics import ConfusionMatrixDisplay, confusion_matrix
10
 
11
- from subpages.page import Context, Page
12
 
13
 
14
  def _get_evaluation(df):
8
  from seqeval.metrics import classification_report
9
  from sklearn.metrics import ConfusionMatrixDisplay, confusion_matrix
10
 
11
+ from src.subpages.page import Context, Page
12
 
13
 
14
  def _get_evaluation(df):
src/subpages/misclassified.py CHANGED
@@ -4,8 +4,8 @@ import pandas as pd
4
  import streamlit as st
5
  from sklearn.metrics import confusion_matrix
6
 
7
- from subpages.page import Context, Page
8
- from utils import htmlify_labeled_example
9
 
10
 
11
  class MisclassifiedPage(Page):
4
  import streamlit as st
5
  from sklearn.metrics import confusion_matrix
6
 
7
+ from src.subpages.page import Context, Page
8
+ from src.utils import htmlify_labeled_example
9
 
10
 
11
  class MisclassifiedPage(Page):
src/subpages/probing.py CHANGED
@@ -1,7 +1,7 @@
1
  import streamlit as st
2
 
3
- from subpages.page import Context, Page
4
- from utils import device, tag_text
5
 
6
  _DEFAULT_SENTENCES = """
7
  Damit hatte er auf ihr letztes , völlig schiefgelaufenes Geschäftsessen angespielt .
1
  import streamlit as st
2
 
3
+ from src.subpages.page import Context, Page
4
+ from src.utils import device, tag_text
5
 
6
  _DEFAULT_SENTENCES = """
7
  Damit hatte er auf ihr letztes , völlig schiefgelaufenes Geschäftsessen angespielt .
src/subpages/random_samples.py CHANGED
@@ -1,8 +1,8 @@
1
  import pandas as pd
2
  import streamlit as st
3
 
4
- from subpages.page import Context, Page
5
- from utils import htmlify_labeled_example
6
 
7
 
8
  class RandomSamplesPage(Page):
1
  import pandas as pd
2
  import streamlit as st
3
 
4
+ from src.subpages.page import Context, Page
5
+ from src.utils import htmlify_labeled_example
6
 
7
 
8
  class RandomSamplesPage(Page):
src/subpages/raw_data.py CHANGED
@@ -1,8 +1,8 @@
1
  import pandas as pd
2
  import streamlit as st
3
 
4
- from subpages.page import Context, Page
5
- from utils import aggrid_interactive_table
6
 
7
 
8
  @st.cache
1
  import pandas as pd
2
  import streamlit as st
3
 
4
+ from src.subpages.page import Context, Page
5
+ from src.utils import aggrid_interactive_table
6
 
7
 
8
  @st.cache