Spaces:
Running
on
Zero
Running
on
Zero
DmitryRyumin
commited on
Commit
β’
9df1fd8
1
Parent(s):
0143949
Summary
Browse files- .flake8 +5 -0
- .gitignore +173 -0
- LICENSE +21 -0
- README.md +7 -6
- app.css +257 -0
- app.py +87 -0
- app/__init__.py +0 -0
- app/app.py +121 -0
- app/authors.py +110 -0
- app/components.py +216 -0
- app/config.py +38 -0
- app/description.py +35 -0
- app/description_steps.py +31 -0
- app/event_handlers/__init__.py +0 -0
- app/event_handlers/calculate_practical_tasks.py +649 -0
- app/event_handlers/calculate_pt_scores_blocks.py +280 -0
- app/event_handlers/clear_blocks.py +150 -0
- app/event_handlers/dropdown_candidates.py +109 -0
- app/event_handlers/event_handlers.py +404 -0
- app/event_handlers/examples_blocks.py +30 -0
- app/event_handlers/files.py +97 -0
- app/event_handlers/languages.py +125 -0
- app/event_handlers/practical_subtasks.py +420 -0
- app/event_handlers/practical_task_sorted.py +137 -0
- app/event_handlers/practical_tasks.py +22 -0
- app/mbti_description.py +31 -0
- app/oceanai_init.py +76 -0
- app/practical_tasks.py +27 -0
- app/requirements_app.py +37 -0
- app/tabs.py +694 -0
- app/utils.py +86 -0
- app/video_metadata.py +25 -0
- config.toml +155 -0
- images/AA.jpg +0 -0
- images/RU.png +0 -0
- images/UK.png +0 -0
- images/calculate_pt_scores.ico +0 -0
- images/clear.ico +0 -0
- images/email.svg +1 -0
- images/examples.ico +0 -0
- images/name.svg +1 -0
- images/phone.svg +1 -0
- images/pt.ico +0 -0
- practical_tasks.yaml +14 -0
- requirements.txt +5 -0
- video_metadata.yaml +31 -0
.flake8
ADDED
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
; https://www.flake8rules.com/
|
2 |
+
|
3 |
+
[flake8]
|
4 |
+
max-line-length = 120
|
5 |
+
ignore = E203, E402, E741, W503
|
.gitignore
ADDED
@@ -0,0 +1,173 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Compiled source #
|
2 |
+
###################
|
3 |
+
*.com
|
4 |
+
*.class
|
5 |
+
*.dll
|
6 |
+
*.exe
|
7 |
+
*.o
|
8 |
+
*.so
|
9 |
+
*.pyc
|
10 |
+
|
11 |
+
# Packages #
|
12 |
+
############
|
13 |
+
# it's better to unpack these files and commit the raw source
|
14 |
+
# git has its own built in compression methods
|
15 |
+
*.7z
|
16 |
+
*.dmg
|
17 |
+
*.gz
|
18 |
+
*.iso
|
19 |
+
*.rar
|
20 |
+
#*.tar
|
21 |
+
*.zip
|
22 |
+
|
23 |
+
# Logs and databases #
|
24 |
+
######################
|
25 |
+
*.log
|
26 |
+
*.sqlite
|
27 |
+
|
28 |
+
# OS generated files #
|
29 |
+
######################
|
30 |
+
.DS_Store
|
31 |
+
ehthumbs.db
|
32 |
+
Icon
|
33 |
+
Thumbs.db
|
34 |
+
.tmtags
|
35 |
+
.idea
|
36 |
+
.vscode
|
37 |
+
tags
|
38 |
+
vendor.tags
|
39 |
+
tmtagsHistory
|
40 |
+
*.sublime-project
|
41 |
+
*.sublime-workspace
|
42 |
+
.bundle
|
43 |
+
|
44 |
+
# Byte-compiled / optimized / DLL files
|
45 |
+
__pycache__/
|
46 |
+
*.py[cod]
|
47 |
+
*$py.class
|
48 |
+
|
49 |
+
# C extensions
|
50 |
+
*.so
|
51 |
+
|
52 |
+
# Distribution / packaging
|
53 |
+
.Python
|
54 |
+
build/
|
55 |
+
develop-eggs/
|
56 |
+
dist/
|
57 |
+
downloads/
|
58 |
+
eggs/
|
59 |
+
.eggs/
|
60 |
+
lib/
|
61 |
+
lib64/
|
62 |
+
parts/
|
63 |
+
sdist/
|
64 |
+
var/
|
65 |
+
wheels/
|
66 |
+
pip-wheel-metadata/
|
67 |
+
share/python-wheels/
|
68 |
+
*.egg-info/
|
69 |
+
.installed.cfg
|
70 |
+
*.egg
|
71 |
+
MANIFEST
|
72 |
+
node_modules/
|
73 |
+
|
74 |
+
# PyInstaller
|
75 |
+
# Usually these files are written by a python script from a template
|
76 |
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
77 |
+
*.manifest
|
78 |
+
*.spec
|
79 |
+
|
80 |
+
# Installer logs
|
81 |
+
pip-log.txt
|
82 |
+
pip-delete-this-directory.txt
|
83 |
+
|
84 |
+
# Unit test / coverage reports
|
85 |
+
htmlcov/
|
86 |
+
.tox/
|
87 |
+
.nox/
|
88 |
+
.coverage
|
89 |
+
.coverage.*
|
90 |
+
.cache
|
91 |
+
nosetests.xml
|
92 |
+
coverage.xml
|
93 |
+
*.cover
|
94 |
+
.hypothesis/
|
95 |
+
.pytest_cache/
|
96 |
+
|
97 |
+
# Translations
|
98 |
+
*.mo
|
99 |
+
*.pot
|
100 |
+
|
101 |
+
# Django stuff:
|
102 |
+
*.log
|
103 |
+
local_settings.py
|
104 |
+
db.sqlite3
|
105 |
+
db.sqlite3-journal
|
106 |
+
|
107 |
+
# Flask stuff:
|
108 |
+
instance/
|
109 |
+
.webassets-cache
|
110 |
+
|
111 |
+
# Scrapy stuff:
|
112 |
+
.scrapy
|
113 |
+
|
114 |
+
# Sphinx documentation
|
115 |
+
docs/_build/
|
116 |
+
|
117 |
+
# PyBuilder
|
118 |
+
target/
|
119 |
+
|
120 |
+
# Jupyter Notebook
|
121 |
+
.ipynb_checkpoints
|
122 |
+
|
123 |
+
# IPython
|
124 |
+
profile_default/
|
125 |
+
ipython_config.py
|
126 |
+
|
127 |
+
# pyenv
|
128 |
+
.python-version
|
129 |
+
|
130 |
+
# pipenv
|
131 |
+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
132 |
+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
133 |
+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
134 |
+
# install all needed dependencies.
|
135 |
+
#Pipfile.lock
|
136 |
+
|
137 |
+
# celery beat schedule file
|
138 |
+
celerybeat-schedule
|
139 |
+
|
140 |
+
# SageMath parsed files
|
141 |
+
*.sage.py
|
142 |
+
|
143 |
+
# Environments
|
144 |
+
.env
|
145 |
+
.venv
|
146 |
+
env/
|
147 |
+
venv/
|
148 |
+
ENV/
|
149 |
+
env.bak/
|
150 |
+
venv.bak/
|
151 |
+
|
152 |
+
# Spyder project settings
|
153 |
+
.spyderproject
|
154 |
+
.spyproject
|
155 |
+
|
156 |
+
# Rope project settings
|
157 |
+
.ropeproject
|
158 |
+
|
159 |
+
# mkdocs documentation
|
160 |
+
/site
|
161 |
+
|
162 |
+
# mypy
|
163 |
+
.mypy_cache/
|
164 |
+
.dmypy.json
|
165 |
+
dmypy.json
|
166 |
+
|
167 |
+
# Pyre type checker
|
168 |
+
.pyre/
|
169 |
+
|
170 |
+
# Custom
|
171 |
+
logs/
|
172 |
+
models/
|
173 |
+
*.csv
|
LICENSE
ADDED
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
MIT License
|
2 |
+
|
3 |
+
Copyright (c) 2024 Elena Ryumina and Dmitry Ryumin
|
4 |
+
|
5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
6 |
+
of this software and associated documentation files (the "Software"), to deal
|
7 |
+
in the Software without restriction, including without limitation the rights
|
8 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
9 |
+
copies of the Software, and to permit persons to whom the Software is
|
10 |
+
furnished to do so, subject to the following conditions:
|
11 |
+
|
12 |
+
The above copyright notice and this permission notice shall be included in all
|
13 |
+
copies or substantial portions of the Software.
|
14 |
+
|
15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
16 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
17 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
18 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
19 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
20 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
21 |
+
SOFTWARE.
|
README.md
CHANGED
@@ -1,13 +1,14 @@
|
|
1 |
---
|
2 |
-
title: OCEANAI
|
3 |
-
emoji:
|
4 |
-
colorFrom:
|
5 |
-
colorTo:
|
6 |
sdk: gradio
|
7 |
-
sdk_version:
|
8 |
app_file: app.py
|
9 |
pinned: false
|
10 |
license: mit
|
|
|
11 |
---
|
12 |
|
13 |
-
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
|
|
|
1 |
---
|
2 |
+
title: OCEANAI
|
3 |
+
emoji: ππ€πππ€
|
4 |
+
colorFrom: gray
|
5 |
+
colorTo: red
|
6 |
sdk: gradio
|
7 |
+
sdk_version: 4.40.0
|
8 |
app_file: app.py
|
9 |
pinned: false
|
10 |
license: mit
|
11 |
+
short_description: Tool to detect personality traits and automate HR-processes
|
12 |
---
|
13 |
|
14 |
+
Check out the configuration reference at <https://huggingface.co/docs/hub/spaces-config-reference>
|
app.css
ADDED
@@ -0,0 +1,257 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
.noti_err {
|
2 |
+
color: #B42E2C;
|
3 |
+
}
|
4 |
+
.noti_true {
|
5 |
+
color: #006900;
|
6 |
+
}
|
7 |
+
|
8 |
+
h1 > a {
|
9 |
+
display: contents;
|
10 |
+
}
|
11 |
+
|
12 |
+
div.app-flex-container {
|
13 |
+
display: flex;
|
14 |
+
gap: 6px;
|
15 |
+
}
|
16 |
+
|
17 |
+
div.video-container {
|
18 |
+
height: 350px;
|
19 |
+
max-height: 350px;
|
20 |
+
}
|
21 |
+
|
22 |
+
div.video-column-container {
|
23 |
+
max-width: 640px;
|
24 |
+
}
|
25 |
+
|
26 |
+
div.video-sorted-container {
|
27 |
+
max-width: 640px;
|
28 |
+
max-height: 350px;
|
29 |
+
}
|
30 |
+
|
31 |
+
div.files-container {
|
32 |
+
height: 350px;
|
33 |
+
max-height: 350px;
|
34 |
+
}
|
35 |
+
|
36 |
+
div.files-container tr {
|
37 |
+
padding-right: 20px;
|
38 |
+
}
|
39 |
+
|
40 |
+
div.files-container div.icon-buttons > button,
|
41 |
+
div.video-sorted-container div.icon-buttons > button {
|
42 |
+
display: none;
|
43 |
+
}
|
44 |
+
|
45 |
+
div.files-container:hover label[data-testid="block-label"],
|
46 |
+
div.video-container:hover label[data-testid="block-label"],
|
47 |
+
div.video-sorted-container:hover label[data-testid="block-label"] {
|
48 |
+
display: none;
|
49 |
+
}
|
50 |
+
|
51 |
+
div.dataframe table > thead,
|
52 |
+
div.dataframe table > tbody {
|
53 |
+
width: auto !important;
|
54 |
+
display: table-header-group;
|
55 |
+
}
|
56 |
+
|
57 |
+
div.dataframe div.table-wrap {
|
58 |
+
height: auto !important;
|
59 |
+
}
|
60 |
+
|
61 |
+
div.files-container div.file-preview-holder {
|
62 |
+
overflow-y: scroll;
|
63 |
+
height: 100%;
|
64 |
+
}
|
65 |
+
|
66 |
+
div.files-container label[data-testid="block-label"] {
|
67 |
+
position: absolute;
|
68 |
+
}
|
69 |
+
|
70 |
+
.calculate_oceanai,
|
71 |
+
.calculate_practical_task {
|
72 |
+
display: flex;
|
73 |
+
padding: 10px 20px;
|
74 |
+
font-size: 16px;
|
75 |
+
font-weight: bold;
|
76 |
+
text-align: center;
|
77 |
+
text-decoration: none;
|
78 |
+
cursor: pointer;
|
79 |
+
border: var(--button-border-width) solid var(--button-primary-border-color);
|
80 |
+
background: var(--button-primary-background-fill);
|
81 |
+
color: var(--button-primary-text-color);
|
82 |
+
border-radius: 8px;
|
83 |
+
transition: all 0.3s ease;
|
84 |
+
}
|
85 |
+
|
86 |
+
.examples_oceanai, .clear_oceanai {
|
87 |
+
display: flex;
|
88 |
+
padding: 10px 20px;
|
89 |
+
font-size: 16px;
|
90 |
+
font-weight: bold;
|
91 |
+
text-align: center;
|
92 |
+
text-decoration: none;
|
93 |
+
cursor: pointer;
|
94 |
+
border-radius: 8px;
|
95 |
+
transition: all 0.3s ease;
|
96 |
+
}
|
97 |
+
|
98 |
+
.calculate_oceanai[disabled],
|
99 |
+
.calculate_practical_task[disabled],
|
100 |
+
.examples_oceanai[disabled],
|
101 |
+
.clear_oceanai[disabled] {
|
102 |
+
cursor: not-allowed;
|
103 |
+
opacity: 0.6;
|
104 |
+
}
|
105 |
+
|
106 |
+
.calculate_oceanai:hover:not([disabled]),
|
107 |
+
.calculate_practical_task:hover:not([disabled]) {
|
108 |
+
border-color: var(--button-primary-border-color-hover);
|
109 |
+
background: var(--button-primary-background-fill-hover);
|
110 |
+
color: var(--button-primary-text-color-hover);
|
111 |
+
}
|
112 |
+
|
113 |
+
.calculate_oceanai:active:not([disabled]),
|
114 |
+
.calculate_practical_task:active:not([disabled]),
|
115 |
+
.examples_oceanai:active:not([disabled]),
|
116 |
+
.clear_oceanai:active:not([disabled]) {
|
117 |
+
transform: scale(0.98);
|
118 |
+
}
|
119 |
+
|
120 |
+
.settings-container {
|
121 |
+
max-width: fit-content;
|
122 |
+
}
|
123 |
+
|
124 |
+
.number-container {
|
125 |
+
max-width: fit-content;
|
126 |
+
min-width: fit-content !important;
|
127 |
+
}
|
128 |
+
|
129 |
+
.dropdown-container {
|
130 |
+
max-width: fit-content;
|
131 |
+
}
|
132 |
+
|
133 |
+
div.languages-container_wrapper {
|
134 |
+
display: grid;
|
135 |
+
justify-content: end;
|
136 |
+
width: auto;
|
137 |
+
position: absolute;
|
138 |
+
right: 0px;
|
139 |
+
z-index: 10;
|
140 |
+
}
|
141 |
+
|
142 |
+
div.languages-container {
|
143 |
+
display: flex;
|
144 |
+
align-items: center;
|
145 |
+
gap: 10px;
|
146 |
+
width: fit-content;
|
147 |
+
}
|
148 |
+
|
149 |
+
div.languages-container > div.country_flags {
|
150 |
+
display: contents;
|
151 |
+
min-width: none;
|
152 |
+
}
|
153 |
+
|
154 |
+
div.languages-container > div.country_flags > div.icon-buttons {
|
155 |
+
display: none;
|
156 |
+
}
|
157 |
+
|
158 |
+
div.languages-container > div.country_flags button {
|
159 |
+
width: fit-content;
|
160 |
+
cursor: default;
|
161 |
+
}
|
162 |
+
|
163 |
+
div.languages-container > div.country_flags div.image-container {
|
164 |
+
display: contents;
|
165 |
+
width: fit-content;
|
166 |
+
}
|
167 |
+
|
168 |
+
div.languages-container > div.country_flags div.image-container > button > div.image-frame > img {
|
169 |
+
height: 32px;
|
170 |
+
}
|
171 |
+
|
172 |
+
.dropdown-language-container {
|
173 |
+
display: contents;
|
174 |
+
}
|
175 |
+
|
176 |
+
.dropdown-container > div > span[data-testid="block-info"] + div {
|
177 |
+
min-width: max-content;
|
178 |
+
}
|
179 |
+
|
180 |
+
div.gradio-container > div.main > div.wrap > div.contain > #component-0 > div.form {
|
181 |
+
display: flex;
|
182 |
+
width: fit-content;
|
183 |
+
float: left;
|
184 |
+
right: 0px;
|
185 |
+
position: absolute;
|
186 |
+
z-index: 2;
|
187 |
+
}
|
188 |
+
|
189 |
+
div.gradio-container > div.main > div.wrap > div.contain > #component-0 > div.tabs {
|
190 |
+
margin-top: 20px;
|
191 |
+
}
|
192 |
+
|
193 |
+
div.dataframe span.wrapper_mbti {
|
194 |
+
display: inline-block;
|
195 |
+
border-radius: 2px;
|
196 |
+
overflow: hidden;
|
197 |
+
}
|
198 |
+
|
199 |
+
div.dataframe span.wrapper_mbti > span.true, div.dataframe span.wrapper_mbti > span.err {
|
200 |
+
color: #FFFFFF;
|
201 |
+
display: inline-block;
|
202 |
+
padding: 2px;
|
203 |
+
}
|
204 |
+
|
205 |
+
div.dataframe span.wrapper_mbti > span.true {
|
206 |
+
background-color: #006900;
|
207 |
+
}
|
208 |
+
|
209 |
+
div.dataframe span.wrapper_mbti > span.err {
|
210 |
+
background-color: #B42E2C;
|
211 |
+
}
|
212 |
+
|
213 |
+
div.mbti-dataframe div.table-wrap {
|
214 |
+
--cell-width-0: 260px !important;
|
215 |
+
}
|
216 |
+
|
217 |
+
div.about_app pre {
|
218 |
+
text-wrap: wrap;
|
219 |
+
}
|
220 |
+
|
221 |
+
div.name-container, div.surname-container,
|
222 |
+
div.email-container, div.phone-container {
|
223 |
+
gap: 4px;
|
224 |
+
display: flex;
|
225 |
+
flex-flow: row;
|
226 |
+
align-items: center;
|
227 |
+
}
|
228 |
+
|
229 |
+
div.name-container > div.metadata_name-logo,
|
230 |
+
div.surname-container > div.metadata_surname-logo,
|
231 |
+
div.email-container > div.metadata_email-logo,
|
232 |
+
div.phone-container > div.metadata_phone-logo {
|
233 |
+
display: contents;
|
234 |
+
min-width: none;
|
235 |
+
}
|
236 |
+
|
237 |
+
div.name-container > div.metadata_name-logo button,
|
238 |
+
div.surname-container > div.metadata_surname-logo button,
|
239 |
+
div.email-container > div.metadata_email-logo button,
|
240 |
+
div.phone-container > div.metadata_phone-logo button {
|
241 |
+
width: fit-content;
|
242 |
+
cursor: default;
|
243 |
+
}
|
244 |
+
|
245 |
+
div.name-container > div.metadata_name-logo div.image-container,
|
246 |
+
div.surname-container > div.metadata_surname-logo div.image-container,
|
247 |
+
div.email-container > div.metadata_email-logo div.image-container,
|
248 |
+
div.phone-container > div.metadata_phone-logo div.image-container {
|
249 |
+
width: fit-content;
|
250 |
+
}
|
251 |
+
|
252 |
+
div.name-container > div.metadata_name-logo div.image-container > button > div > img,
|
253 |
+
div.surname-container > div.metadata_surname-logo div.image-container > button > div > img,
|
254 |
+
div.email-container > div.metadata_email-logo div.image-container > button > div > img,
|
255 |
+
div.phone-container > div.metadata_phone-logo div.image-container > button > div > img {
|
256 |
+
height: 32px;
|
257 |
+
}
|
app.py
ADDED
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: app.py
|
3 |
+
Authors: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: OCEANAI App for gradio.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
import gradio as gr
|
9 |
+
|
10 |
+
# Importing necessary components for the Gradio app
|
11 |
+
from app.config import CONFIG_NAME, config_data, load_tab_creators
|
12 |
+
from app.event_handlers.event_handlers import setup_app_event_handlers
|
13 |
+
from app import tabs
|
14 |
+
from app.components import dropdown_create_ui
|
15 |
+
|
16 |
+
gr.set_static_paths(paths=[config_data.StaticPaths_IMAGES])
|
17 |
+
|
18 |
+
|
19 |
+
def create_gradio_app() -> gr.Blocks:
|
20 |
+
with gr.Blocks(
|
21 |
+
theme=gr.themes.Default(), css_paths=config_data.AppSettings_CSS_PATH
|
22 |
+
) as gradio_app:
|
23 |
+
with gr.Column(
|
24 |
+
visible=True,
|
25 |
+
render=True,
|
26 |
+
variant="default",
|
27 |
+
elem_classes="languages-container_wrapper",
|
28 |
+
):
|
29 |
+
with gr.Row(
|
30 |
+
visible=True,
|
31 |
+
render=True,
|
32 |
+
variant="default",
|
33 |
+
elem_classes="languages-container",
|
34 |
+
) as languages_row:
|
35 |
+
country_flags = gr.Image(
|
36 |
+
value=config_data.StaticPaths_IMAGES
|
37 |
+
+ config_data.Images_LANGUAGES[0],
|
38 |
+
container=False,
|
39 |
+
interactive=False,
|
40 |
+
show_label=False,
|
41 |
+
visible=True,
|
42 |
+
show_download_button=False,
|
43 |
+
elem_classes="country_flags",
|
44 |
+
show_fullscreen_button=False,
|
45 |
+
)
|
46 |
+
|
47 |
+
languages = dropdown_create_ui(
|
48 |
+
label=None,
|
49 |
+
info=None,
|
50 |
+
choices=config_data.Settings_LANGUAGES_EN,
|
51 |
+
value=config_data.Settings_LANGUAGES_EN[0],
|
52 |
+
visible=True,
|
53 |
+
show_label=False,
|
54 |
+
elem_classes="dropdown-language-container",
|
55 |
+
interactive=True,
|
56 |
+
)
|
57 |
+
|
58 |
+
tab_results = {}
|
59 |
+
ts = []
|
60 |
+
|
61 |
+
available_functions = {
|
62 |
+
attr: getattr(tabs, attr)
|
63 |
+
for attr in dir(tabs)
|
64 |
+
if callable(getattr(tabs, attr)) and attr.endswith("_tab")
|
65 |
+
}
|
66 |
+
|
67 |
+
tab_creators = load_tab_creators(CONFIG_NAME, available_functions)
|
68 |
+
|
69 |
+
for tab_name, create_tab_function in tab_creators.items():
|
70 |
+
with gr.Tab(tab_name) as tab:
|
71 |
+
app_instance = create_tab_function()
|
72 |
+
tab_results[tab_name] = app_instance
|
73 |
+
ts.append(tab)
|
74 |
+
|
75 |
+
setup_app_event_handlers(
|
76 |
+
*tab_results[list(tab_results.keys())[0]],
|
77 |
+
*ts,
|
78 |
+
languages_row,
|
79 |
+
country_flags,
|
80 |
+
languages
|
81 |
+
)
|
82 |
+
|
83 |
+
return gradio_app
|
84 |
+
|
85 |
+
|
86 |
+
if __name__ == "__main__":
|
87 |
+
create_gradio_app().queue(api_open=False).launch(share=False)
|
app/__init__.py
ADDED
File without changes
|
app/app.py
ADDED
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: app.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: About the app.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
APP = """
|
9 |
+
<div class="about_app">
|
10 |
+
<div style="max-width: 90%; margin: auto; padding: 20px;">
|
11 |
+
<p style="text-align: center;">
|
12 |
+
<img src="https://raw.githubusercontent.com/aimclub/OCEANAI/main/docs/source/_static/logo.svg" alt="Logo" style="width: 20%; height: auto; display: block; margin: auto;">
|
13 |
+
</p>
|
14 |
+
|
15 |
+
<blockquote>
|
16 |
+
<a href="https://oceanai.readthedocs.io/en/latest/">OCEAN-AI</a> is an open-source library consisting of a set of algorithms for intellectual analysis of human behavior based on multimodal data for automatic personality traits (PT) assessment. The library evaluates five PT: <strong>O</strong>penness to experience, <strong>C</strong>onscientiousness, <strong>E</strong>xtraversion, <strong>A</strong>greeableness, Non-<strong>N</strong>euroticism.
|
17 |
+
</blockquote>
|
18 |
+
|
19 |
+
<p style="text-align: center;">
|
20 |
+
<img src="https://raw.githubusercontent.com/aimclub/OCEANAI/main/docs/source/_static/Pipeline_OCEANAI.en.svg" alt="Pipeline" style="max-width: 60%; height: auto; display: block; margin: auto;">
|
21 |
+
</p>
|
22 |
+
|
23 |
+
<hr>
|
24 |
+
|
25 |
+
<h2>OCEAN-AI includes three main algorithms:</h2>
|
26 |
+
<ol>
|
27 |
+
<li>Audio Information Analysis Algorithm (AIA).</li>
|
28 |
+
<li>Video Information Analysis Algorithm (VIA).</li>
|
29 |
+
<li>Text Information Analysis Algorithm (TIA).</li>
|
30 |
+
<li>Multimodal Information Fusion Algorithm (MIF).</li>
|
31 |
+
</ol>
|
32 |
+
|
33 |
+
<p>The AIA, VIA and TIA algorithms implement the functions of strong artificial intelligence (AI) in terms of complexing acoustic, visual and linguistic features built on different principles (hand-crafted and deep features), i.e. these algorithms implement the approaches of composite (hybrid) AI. The necessary pre-processing of audio, video and text information, the calculation of visual, acoustic and linguistic features and the output of predictions of personality traits based on them are carried out in the algorithms.</p>
|
34 |
+
|
35 |
+
<p>The MIF algorithm is a combination of three information analysis algorithms (AIA, VIA and TIA). This algorithm performs feature-level fusion obtained by the AIA, VIA and TIA algorithms.</p>
|
36 |
+
|
37 |
+
<p>In addition to the main task - unimodal and multimodal personality traits assessment, the features implemented in <a href="https://oceanai.readthedocs.io/en/latest/">OCEAN-AI</a> will allow researchers to solve other problems of analyzing human behavior, for example, affective state recognition.</p>
|
38 |
+
|
39 |
+
<p>The library solves practical tasks:</p>
|
40 |
+
<ol>
|
41 |
+
<li><a href="https://oceanai.readthedocs.io/en/latest/user_guide/notebooks/Pipeline_practical_task_1.html">Ranking of potential candidates by professional responsibilities</a>.</li>
|
42 |
+
<li><a href="https://oceanai.readthedocs.io/en/latest/user_guide/notebooks/Pipeline_practical_task_2.html">Predicting consumer preferences for industrial goods</a>.</li>
|
43 |
+
<li><a href="https://oceanai.readthedocs.io/ru/latest/user_guide/notebooks/Pipeline_practical_task_3.html">Forming effective work teams</a>.</li>
|
44 |
+
</ol>
|
45 |
+
|
46 |
+
<p><a href="https://oceanai.readthedocs.io/en/latest/">OCEAN-AI</a> uses the latest open-source libraries for audio, video and text processing: <a href="https://librosa.org/">librosa</a>, <a href="https://audeering.github.io/opensmile-python/">openSMILE</a>, <a href="https://pypi.org/project/opencv-python/">openCV</a>, <a href="https://google.github.io/mediapipe/getting_started/python">mediapipe</a>, <a href="https://pypi.org/project/transformers">transformers</a>.</p>
|
47 |
+
|
48 |
+
<p><a href="https://oceanai.readthedocs.io/en/latest/">OCEAN-AI</a> is written in the <a href="https://www.python.org/">python programming language</a>. Neural network models are implemented and trained using an open-source library code <a href="https://www.tensorflow.org/">TensorFlow</a>.</p>
|
49 |
+
|
50 |
+
<hr>
|
51 |
+
|
52 |
+
<h2>Research data</h2>
|
53 |
+
|
54 |
+
<p>The <a href="https://oceanai.readthedocs.io/en/latest/">OCEAN-AI</a> library was tested on two corpora:</p>
|
55 |
+
|
56 |
+
<ol>
|
57 |
+
<li>The publicly available and large-scale <a href="https://chalearnlap.cvc.uab.cat/dataset/24/description/">First Impressions V2 corpus</a>.</li>
|
58 |
+
<li>On the first publicly available Russian-language <a href="https://hci.nw.ru/en/pages/mupta-corpus">Multimodal Personality Traits Assessment (MuPTA) corpus</a>.</li>
|
59 |
+
</ol>
|
60 |
+
|
61 |
+
<hr>
|
62 |
+
|
63 |
+
<h2>Publications</h2>
|
64 |
+
|
65 |
+
<h3>Journals</h3>
|
66 |
+
<pre>
|
67 |
+
<code>
|
68 |
+
@article{ryumina24_prl,
|
69 |
+
author = {Ryumina, Elena and Markitantov, Maxim and Ryumin, Dmitry and Karpov, Alexey},
|
70 |
+
title = {Gated Siamese Fusion Network based on Multimodal Deep and Hand-Crafted Features for Personality Traits Assessment},
|
71 |
+
journal = {Pattern Recognition Letters},
|
72 |
+
volume = {185},
|
73 |
+
pages = {45--51},
|
74 |
+
year = {2024},
|
75 |
+
doi = {<a href="https://doi.org/10.1016/j.patrec.2024.07.004">https://doi.org/10.1016/j.patrec.2024.07.004</a>},
|
76 |
+
}
|
77 |
+
@article{ryumina24_eswa,
|
78 |
+
author = {Elena Ryumina and Maxim Markitantov and Dmitry Ryumin and Alexey Karpov},
|
79 |
+
title = {OCEAN-AI Framework with EmoFormer Cross-Hemiface Attention Approach for Personality Traits Assessment},
|
80 |
+
journal = {Expert Systems with Applications},
|
81 |
+
volume = {239},
|
82 |
+
pages = {122441},
|
83 |
+
year = {2024},
|
84 |
+
doi = {<a href="https://doi.org/10.1016/j.eswa.2023.122441">https://doi.org/10.1016/j.eswa.2023.122441</a>},
|
85 |
+
}
|
86 |
+
@article{ryumina22_neurocomputing,
|
87 |
+
author = {Elena Ryumina and Denis Dresvyanskiy and Alexey Karpov},
|
88 |
+
title = {In Search of a Robust Facial Expressions Recognition Model: A Large-Scale Visual Cross-Corpus Study},
|
89 |
+
journal = {Neurocomputing},
|
90 |
+
volume = {514},
|
91 |
+
pages = {435-450},
|
92 |
+
year = {2022},
|
93 |
+
doi = {<a href="https://doi.org/10.1016/j.neucom.2022.10.013">https://doi.org/10.1016/j.neucom.2022.10.013</a>},
|
94 |
+
}
|
95 |
+
</code>
|
96 |
+
</pre>
|
97 |
+
|
98 |
+
<h3>Conferences</h3>
|
99 |
+
<pre>
|
100 |
+
<code>
|
101 |
+
@inproceedings{ryumina24_interspeech,
|
102 |
+
author = {Elena Ryumina and Dmitry Ryumin and and Alexey Karpov},
|
103 |
+
title = {OCEAN-AI: Open Multimodal Framework for Personality Traits Assessment and HR-Processes Automatization},
|
104 |
+
year = {2024},
|
105 |
+
booktitle = {INTERSPEECH},
|
106 |
+
pages = {3630--3631},
|
107 |
+
doi = {<a href="https://www.isca-archive.org/interspeech_2024/ryumina24_interspeech.html#">https://www.isca-archive.org/interspeech_2024/ryumina24_interspeech.html#</a>},
|
108 |
+
}
|
109 |
+
@inproceedings{ryumina23_interspeech,
|
110 |
+
author = {Elena Ryumina and Dmitry Ryumin and Maxim Markitantov and Heysem Kaya and Alexey Karpov},
|
111 |
+
title = {Multimodal Personality Traits Assessment (MuPTA) Corpus: The Impact of Spontaneous and Read Speech},
|
112 |
+
year = {2023},
|
113 |
+
booktitle = {INTERSPEECH},
|
114 |
+
pages = {4049--4053},
|
115 |
+
doi = {<a href="https://doi.org/10.21437/Interspeech.2023-1686">https://doi.org/10.21437/Interspeech.2023-1686</a>},
|
116 |
+
}
|
117 |
+
</code>
|
118 |
+
</pre>
|
119 |
+
</div>
|
120 |
+
</div>
|
121 |
+
"""
|
app/authors.py
ADDED
@@ -0,0 +1,110 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: authors.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: About the authors.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
AUTHORS = """
|
9 |
+
<div style="display: flex; justify-content: center; gap: 10px;">
|
10 |
+
<div style="flex-basis: 40%;">
|
11 |
+
<a href="https://github.com/ElenaRyumina">
|
12 |
+
<img src="https://readme-typing-svg.demolab.com?font=Roboto&duration=1500&pause=100&color=3081F7&vCenter=true&multiline=true&width=435&height=70&lines=Elena+Ryumina;Artificial+Intelligence+Researcher" alt="ElenaRyumina" />
|
13 |
+
</a>
|
14 |
+
<div style="display: flex; margin-bottom: 6px;">
|
15 |
+
<a href="https://www.webofscience.com/wos/author/record/ABY-9103-2022" style="margin-right: 6px;">
|
16 |
+
<img src="https://img.shields.io/badge/Web%20of%20Science-5D34BF??&style=flat-square&logo=clarivate&logoColor=white" alt="" />
|
17 |
+
</a>
|
18 |
+
<a href="https://www.scopus.com/authid/detail.uri?authorId=57220572427" style="margin-right: 6px;">
|
19 |
+
<img src="https://img.shields.io/badge/Scopus-%23E9711C.svg?&style=flat-square&logo=scopus&logoColor=white" alt="" />
|
20 |
+
</a>
|
21 |
+
<a href="https://scholar.google.com/citations?user=DOBkQssAAAAJ" style="margin-right: 6px;">
|
22 |
+
<img src="https://img.shields.io/badge/Google%20Scholar-%234285F4.svg?&style=flat-square&logo=google-scholar&logoColor=white" alt="" />
|
23 |
+
</a>
|
24 |
+
<a href="https://orcid.org/0000-0002-4135-6949">
|
25 |
+
<img src="https://img.shields.io/badge/ORCID-0000--0002--4135--6949-green.svg?&style=flat-square&logo=orcid&logoColor=white" alt="" />
|
26 |
+
</a>
|
27 |
+
</div>
|
28 |
+
<div style="display: flex; margin-bottom: 6px;">
|
29 |
+
<a href="https://huggingface.co/ElenaRyumina" style="margin-right: 6px;">
|
30 |
+
<img src="https://img.shields.io/badge/π€-Hugging%20Face-FFD21F.svg?style=flat-square&&logoColor=white" alt="" />
|
31 |
+
</a>
|
32 |
+
<a href="mailto:ryumina_ev@mail.ru">
|
33 |
+
<img src="https://img.shields.io/badge/-Email-red?style=flat-square&logo=gmail&logoColor=white" alt="" />
|
34 |
+
</a>
|
35 |
+
</div>
|
36 |
+
<a href="https://github.com/ElenaRyumina" style="display: inline-block;">
|
37 |
+
<img src="https://github-stats-alpha.vercel.app/api?username=ElenaRyumina&cc=3081F7&tc=FFFFFF&ic=FFFFFF&bc=FFFFFF" alt="" />
|
38 |
+
</a>
|
39 |
+
<div style="display: flex;">
|
40 |
+
<img src="https://komarev.com/ghpvc/?username=ElenaRyumina&style=flat-square" alt="" />
|
41 |
+
</div>
|
42 |
+
</div>
|
43 |
+
|
44 |
+
<div style="flex-basis: 40%;">
|
45 |
+
<a href="https://github.com/DmitryRyumin">
|
46 |
+
<img src="https://readme-typing-svg.demolab.com?font=Roboto&duration=1500&pause=100&color=3081F7&vCenter=true&multiline=true&width=435&height=70&lines=Dr.+Dmitry+Ryumin;Artificial+Intelligence+Researcher" alt="DmitryRyumin" />
|
47 |
+
</a>
|
48 |
+
<div style="display: flex; margin-bottom: 6px;">
|
49 |
+
<a href="https://dmitryryumin.github.io" style="margin-right: 6px;">
|
50 |
+
<img src="https://img.shields.io/badge/Website-blue??&style=flat-square&logo=opsgenie&logoColor=white" alt="" />
|
51 |
+
</a>
|
52 |
+
<a href="https://www.webofscience.com/wos/author/record/K-7989-2018" style="margin-right: 6px;">
|
53 |
+
<img src="https://img.shields.io/badge/Web%20of%20Science-5D34BF??&style=flat-square&logo=clarivate&logoColor=white" alt="" />
|
54 |
+
</a>
|
55 |
+
<a href="https://www.scopus.com/authid/detail.uri?authorId=57191960214" style="margin-right: 6px;">
|
56 |
+
<img src="https://img.shields.io/badge/Scopus-%23E9711C.svg?&style=flat-square&logo=scopus&logoColor=white" alt="" />
|
57 |
+
</a>
|
58 |
+
<a href="https://scholar.google.com/citations?user=LrTIp5IAAAAJ" style="margin-right: 6px;">
|
59 |
+
<img src="https://img.shields.io/badge/Google%20Scholar-%234285F4.svg?&style=flat-square&logo=google-scholar&logoColor=white" alt="" />
|
60 |
+
</a>
|
61 |
+
<a href="https://orcid.org/0000-0002-7935-0569">
|
62 |
+
<img src="https://img.shields.io/badge/ORCID-0000--0002--7935--0569-green.svg?&style=flat-square&logo=orcid&logoColor=white" alt="" />
|
63 |
+
</a>
|
64 |
+
</div>
|
65 |
+
<div style="display: flex; margin-bottom: 6px;">
|
66 |
+
<a href="https://huggingface.co/DmitryRyumin" style="margin-right: 6px;">
|
67 |
+
<img src="https://img.shields.io/badge/π€-Hugging%20Face-FFD21F.svg?style=flat-square&&logoColor=white" alt="" />
|
68 |
+
</a>
|
69 |
+
<a href="https://t.me/dmitry_ryumin" style="margin-right: 6px;">
|
70 |
+
<img src="https://img.shields.io/badge/Telegram-2CA5E0?style=flat-square&logo=telegram&logoColor=white" alt="" />
|
71 |
+
</a>
|
72 |
+
<a href="mailto:neweraairesearch@gmail.com">
|
73 |
+
<img src="https://img.shields.io/badge/-Email-red?style=flat-square&logo=gmail&logoColor=white" alt=""/>
|
74 |
+
</a>
|
75 |
+
</div>
|
76 |
+
<a href="https://github.com/DmitryRyumin" style="display: inline-block;">
|
77 |
+
<img src="https://github-stats-alpha.vercel.app/api?username=DmitryRyumin&cc=3081F7&tc=FFFFFF&ic=FFFFFF&bc=FFFFFF" alt="" />
|
78 |
+
</a>
|
79 |
+
<div style="display: flex; flex-wrap: wrap; align-items: flex-start;">
|
80 |
+
<img src="https://custom-icon-badges.demolab.com/badge/dynamic/json?style=flat-square&logo=fire&logoColor=fff&color=orange&label=GitHub%20streak&query=%24.currentStreak.length&suffix=%20days&url=https%3A%2F%2Fstreak-stats.demolab.com%2F%3Fuser%3Ddmitryryumin%26type%3Djson" alt="" style="margin-right: 6px; margin-bottom: 6px;" />
|
81 |
+
<img src="https://komarev.com/ghpvc/?username=DmitryRyumin&style=flat-square" alt="" style="margin-bottom: 6px;" />
|
82 |
+
</div>
|
83 |
+
</div>
|
84 |
+
</div>
|
85 |
+
<div style="display: flex; justify-content: center; align-items: center; margin-top: 10px;">
|
86 |
+
<img src="https://huggingface.co/spaces/ElenaRyumina/OCEANAI/resolve/main/images/AA.jpg" alt="Professor Alexey Karpov" style="margin-right: 20px; border-radius: 50%; width: 80px; height: 80px; object-fit: cover;">
|
87 |
+
<div style="flex-basis: 40%;">
|
88 |
+
<a href="https://hci.nw.ru/en/employees/1" style="display: inline-block;">
|
89 |
+
<img src="https://readme-typing-svg.demolab.com?font=Roboto&duration=1500&pause=100&color=3081F7&vCenter=true&multiline=true&width=435&height=70&lines=Dr.+Sc.+Alexey+Karpov;Team+Leader" alt="AlexeyKarpov" />
|
90 |
+
</a>
|
91 |
+
<div style="display: flex; margin-bottom: 6px;">
|
92 |
+
<a href="https://www.webofscience.com/wos/author/record/A-8905-2012" style="margin-right: 6px;">
|
93 |
+
<img src="https://img.shields.io/badge/Web%20of%20Science-5D34BF??&style=flat-square&logo=clarivate&logoColor=white" alt="" />
|
94 |
+
</a>
|
95 |
+
<a href="https://www.scopus.com/authid/detail.uri?authorId=57219469958" style="margin-right: 6px;">
|
96 |
+
<img src="https://img.shields.io/badge/Scopus-%23E9711C.svg?&style=flat-square&logo=scopus&logoColor=white" alt="" />
|
97 |
+
</a>
|
98 |
+
<a href="https://scholar.google.com/citations?user=Q0C3f1oAAAAJ" style="margin-right: 6px;">
|
99 |
+
<img src="https://img.shields.io/badge/Google%20Scholar-%234285F4.svg?&style=flat-square&logo=google-scholar&logoColor=white" alt="" />
|
100 |
+
</a>
|
101 |
+
<a href="https://orcid.org/0000-0003-3424-652X" style="margin-right: 6px;">
|
102 |
+
<img src="https://img.shields.io/badge/ORCID-0000--0003--3424--652X-green.svg?&style=flat-square&logo=orcid&logoColor=white" alt="" />
|
103 |
+
</a>
|
104 |
+
<a href="mailto:karpov@iias.spb.su">
|
105 |
+
<img src="https://img.shields.io/badge/-Email-red?style=flat-square&logo=gmail&logoColor=white" alt="" />
|
106 |
+
</a>
|
107 |
+
</div>
|
108 |
+
</div>
|
109 |
+
</div>
|
110 |
+
"""
|
app/components.py
ADDED
@@ -0,0 +1,216 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: components.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: Utility functions for creating Gradio components.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
import gradio as gr
|
9 |
+
from typing import Union, List, Callable, Optional, Literal
|
10 |
+
|
11 |
+
# Importing necessary components for the Gradio app
|
12 |
+
from app.config import config_data
|
13 |
+
|
14 |
+
|
15 |
+
def html_message(
    message: str = "", error: bool = True, visible: bool = True
) -> gr.HTML:
    """Wrap *message* in a styled <h3> notification banner.

    NOTE(review): the flag mapping looks inverted — ``error=True`` selects
    the 'noti_true' CSS class and ``error=False`` selects 'noti_err'.
    Existing callers rely on this mapping, so it is preserved verbatim;
    confirm the intended semantics before changing it.
    """
    if error:
        css_class = "noti_true"
    else:
        css_class = "noti_err"

    return gr.HTML(value=f"<h3 class='{css_class}'>{message}</h3>", visible=visible)
|
21 |
+
|
22 |
+
|
23 |
+
def files_create_ui(
    value: Union[str, List[str], Callable, None] = None,
    file_count: str = "multiple",
    file_types: Optional[List] = None,
    label: str = config_data.OtherMessages_VIDEO_FILES[
        config_data.AppSettings_DEFAULT_LANG_ID
    ],
    show_label: bool = True,
    interactive: bool = True,
    visible: bool = True,
    elem_classes: Optional[str] = "files-container",
) -> gr.File:
    """Build a configured ``gr.File`` upload component.

    Args:
        value: Initial file(s) shown in the component.
        file_count: Upload mode understood by Gradio ('single'/'multiple').
        file_types: Accepted file types; defaults to ``["video"]``.
        label: Caption above the component (localized default from config).
        show_label: Whether the caption is rendered.
        interactive: Whether the user can upload files.
        visible: Whether the component is rendered.
        elem_classes: CSS class hook for styling.

    Returns:
        A ``gr.File`` instance with the given settings.
    """
    # Fix: the original used a mutable default argument (file_types=["video"]),
    # which is shared across calls; build the default per call instead.
    if file_types is None:
        file_types = ["video"]

    return gr.File(
        value=value,
        file_count=file_count,
        file_types=file_types,
        label=label,
        show_label=show_label,
        interactive=interactive,
        visible=visible,
        elem_classes=elem_classes,
    )
|
45 |
+
|
46 |
+
|
47 |
+
def video_create_ui(
    value: Optional[str] = None,
    label: str = config_data.OtherMessages_VIDEO_PLAYER[
        config_data.AppSettings_DEFAULT_LANG_ID
    ],
    file_name: Optional[str] = None,
    show_label: bool = True,
    interactive: bool = False,
    visible: bool = True,
    elem_classes: Optional[str] = "files-container",
) -> gr.Video:
    """Build a ``gr.Video`` player, optionally appending the file name to the label."""
    display_label = label if file_name is None else f"{label} ({file_name})"

    return gr.Video(
        value=value,
        label=display_label,
        show_label=show_label,
        interactive=interactive,
        visible=visible,
        elem_classes=elem_classes,
    )
|
69 |
+
|
70 |
+
|
71 |
+
def dataframe(
    headers: Optional[List] = None,
    values: Optional[List] = None,
    height: int = 500,
    wrap: bool = True,
    visible: bool = True,
    interactive: bool = False,
    elem_classes: Optional[str] = "dataframe",
) -> gr.Dataframe:
    """Build a ``gr.Dataframe``; columns render as markdown when data is supplied."""
    if headers is not None and values is not None:
        # One markdown column per header so embedded HTML/markup is rendered.
        datatype = ["markdown"] * len(headers)
    else:
        datatype = "str"

    return gr.Dataframe(
        value=values,
        headers=headers,
        datatype=datatype,
        max_height=height,
        wrap=wrap,
        visible=visible,
        interactive=interactive,
        elem_classes=elem_classes,
    )
|
95 |
+
|
96 |
+
|
97 |
+
def button(
    value: str = "",
    interactive: bool = True,
    scale: int = 3,
    icon: Optional[str] = None,
    visible: bool = True,
    elem_classes: Optional[str] = None,
) -> gr.Button:
    """Build a ``gr.Button`` with the given caption, icon and layout settings."""
    options = dict(
        value=value,
        interactive=interactive,
        scale=scale,
        icon=icon,
        visible=visible,
        elem_classes=elem_classes,
    )
    return gr.Button(**options)
|
113 |
+
|
114 |
+
|
115 |
+
def radio_create_ui(
    value: Union[str, int, float, Callable, None],
    label: str,
    choices: Union[List, None],
    info: str,
    interactive: bool,
    visible: bool,
) -> gr.Radio:
    """Build a ``gr.Radio`` group with a visible label and container.

    Args:
        value: Initially selected option.
        label: Caption shown above the radio group.
        choices: Selectable options (``None`` for an empty group).
        info: Helper text rendered under the label.
        interactive: Whether the user can change the selection.
        visible: Whether the component is rendered.
    """
    return gr.Radio(
        value=value,
        label=label,
        choices=choices,
        info=info,
        show_label=True,
        container=True,
        interactive=interactive,
        visible=visible,
    )
|
133 |
+
|
134 |
+
|
135 |
+
def number_create_ui(
    value: float = 0.5,
    minimum: float = 0.0,
    maximum: float = 1.0,
    step: float = 0.01,
    label: Optional[str] = None,
    info: Optional[str] = None,
    show_label: bool = True,
    interactive: bool = True,
    visible: bool = False,
    render: bool = True,
    elem_classes: Optional[str] = "number-container",
) -> gr.Number:
    """Build a bounded ``gr.Number`` input (defaults suit 0–1 score thresholds).

    Args:
        value: Initial numeric value.
        minimum: Lower bound accepted by the input.
        maximum: Upper bound accepted by the input.
        step: Increment used by the spinner controls.
        label: Caption above the input.
        info: Helper text rendered under the label.
        show_label: Whether the caption is rendered.
        interactive: Whether the user can edit the value.
        visible: Whether the component is rendered (hidden by default).
        render: Whether Gradio renders the component immediately.
        elem_classes: CSS class hook for styling.
    """
    return gr.Number(
        value=value,
        minimum=minimum,
        maximum=maximum,
        step=step,
        label=label,
        info=info,
        show_label=show_label,
        interactive=interactive,
        visible=visible,
        render=render,
        elem_classes=elem_classes,
    )
|
161 |
+
|
162 |
+
|
163 |
+
def dropdown_create_ui(
    label: Optional[str] = None,
    info: Optional[str] = None,
    choices: Optional[List[str]] = None,
    value: Optional[List[str]] = None,
    multiselect: bool = False,
    show_label: bool = True,
    interactive: bool = True,
    visible: bool = True,
    render: bool = True,
    elem_classes: Optional[str] = None,
) -> gr.Dropdown:
    """Build a ``gr.Dropdown`` with the given options and selection mode."""
    options = dict(
        choices=choices,
        value=value,
        multiselect=multiselect,
        label=label,
        info=info,
        show_label=show_label,
        interactive=interactive,
        visible=visible,
        render=render,
        elem_classes=elem_classes,
    )
    return gr.Dropdown(**options)
|
187 |
+
|
188 |
+
|
189 |
+
def textbox_create_ui(
    value: Optional[str] = None,
    type: Literal["text", "password", "email"] = "text",
    label: Optional[str] = None,
    placeholder: Optional[str] = None,
    info: Optional[str] = None,
    max_lines: int = 1,
    show_label: bool = True,
    interactive: bool = True,
    visible: bool = True,
    show_copy_button: bool = True,
    scale: int = 1,
    container: bool = False,
) -> gr.Textbox:
    """Build a single-line ``gr.Textbox``.

    Args:
        value: Initial text content.
        type: Input mode ('text', 'password' or 'email').
        label: Caption above the textbox.
        placeholder: Hint text shown while the box is empty.
        info: Helper text rendered under the label.
        max_lines: Maximum number of lines (1 keeps it single-line).
        show_label: Whether the caption is rendered.
        interactive: Whether the user can edit the text.
        visible: Whether the component is rendered.
        show_copy_button: Whether a copy-to-clipboard button is shown.
        scale: Relative width within the parent row.
        container: Whether the textbox is wrapped in a styled container.
    """
    return gr.Textbox(
        value=value,
        type=type,
        label=label,
        placeholder=placeholder,
        info=info,
        max_lines=max_lines,
        show_label=show_label,
        interactive=interactive,
        visible=visible,
        show_copy_button=show_copy_button,
        scale=scale,
        container=container,
    )
|
app/config.py
ADDED
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: config.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: Configuration module for handling settings.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
import toml
|
9 |
+
from typing import Callable, Dict
|
10 |
+
from types import SimpleNamespace
|
11 |
+
|
12 |
+
CONFIG_NAME = "config.toml"
|
13 |
+
|
14 |
+
|
15 |
+
def flatten_dict(prefix: str, d: Dict) -> Dict:
    """Recursively flatten a nested dict, joining key levels with '_'.

    E.g. ``{"A": {"b": 1}}`` becomes ``{"A_b": 1}`` (with an empty prefix).
    """
    flat: Dict = {}
    for key, value in d.items():
        compound = f"{prefix}{key}"
        if isinstance(value, dict):
            flat.update(flatten_dict(f"{compound}_", value))
        else:
            flat[compound] = value
    return flat
|
22 |
+
|
23 |
+
|
24 |
+
def load_tab_creators(file_path: str, available_functions: Dict) -> Dict:
    """Map tab names from the [TabCreators] TOML section to callables.

    Fix: the ``available_functions`` parameter was annotated ``Callable``,
    but the body subscripts it like a mapping (``available_functions[value]``);
    the annotation is corrected to ``Dict``.

    Args:
        file_path: Path to the TOML configuration file.
        available_functions: Maps function names (strings in the TOML) to
            the actual tab-creator callables; unknown names raise KeyError.

    Returns:
        Dict of tab name -> tab-creator callable.
    """
    config = toml.load(file_path)
    tab_creators_data = config.get("TabCreators", {})

    return {key: available_functions[value] for key, value in tab_creators_data.items()}
|
29 |
+
|
30 |
+
|
31 |
+
def load_config(file_path: str) -> SimpleNamespace:
    """Load a TOML file and expose its flattened keys as namespace attributes."""
    raw = toml.load(file_path)
    # Nested TOML tables become underscore-joined attribute names
    # (e.g. [AppSettings] APP_VERSION -> AppSettings_APP_VERSION).
    return SimpleNamespace(**flatten_dict("", raw))
|
36 |
+
|
37 |
+
|
38 |
+
# Application-wide settings object, parsed once at import time from
# config.toml and shared by the rest of the app as a flat namespace.
config_data = load_config(CONFIG_NAME)
|
app/description.py
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: description.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: Project description for the Gradio app.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
# Importing necessary components for the Gradio app
|
9 |
+
from app.config import config_data
|
10 |
+
|
11 |
+
# HTML template for the app header/description; {description}, {version} and
# {version_label} are filled in below for each supported UI language.
TEMPLATE = """\
<h1><a href="https://github.com/aimclub/OCEANAI" target="_blank">OCEAN-AI</a> {description}.</h1>

<div class="app-flex-container">
    <img src="https://img.shields.io/badge/version-v{version}-rc0" alt="{version_label}">
    <a href='https://github.com/DmitryRyumin/OCEANAI' target='_blank'><img src='https://img.shields.io/github/stars/DmitryRyumin/OCEANAI?style=flat' alt='GitHub' /></a>
</div>

The models used in OCEAN-AI were trained on 15-second clips from the ChaLearn First Impression v2 dataset.
For more reliable predictions, 15-second videos are recommended, but OCEAN-AI can analyze videos of any length.
Due to limited computational resources on HuggingFace, we provide six 3-second videos as examples.
"""

# Two pre-rendered description variants; indices 0 and 1 follow the order of
# the per-language lists in config.toml (presumably one per UI language —
# confirm against config.toml).
DESCRIPTIONS = [
    TEMPLATE.format(
        description=config_data.InformationMessages_DESCRIPTIONS[0],
        version=config_data.AppSettings_APP_VERSION,
        version_label=config_data.Labels_APP_VERSION[0],
    ),
    TEMPLATE.format(
        description=config_data.InformationMessages_DESCRIPTIONS[1],
        version=config_data.AppSettings_APP_VERSION,
        version_label=config_data.Labels_APP_VERSION[1],
    ),
]
|
app/description_steps.py
ADDED
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: description_steps.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: Project steps description for the Gradio app.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
# Importing necessary components for the Gradio app
|
9 |
+
from app.config import config_data
|
10 |
+
|
11 |
+
# Centered heading template used for the step banners in the UI.
STEPS_TEMPLATE = """\
<h2 align="center">{text}</h2>
"""

# Step-1 banner text, one entry per UI language (indices follow the order of
# the per-language lists in config.toml).
STEP_1 = [
    STEPS_TEMPLATE.format(
        text=config_data.InformationMessages_STEP_1[0],
    ),
    STEPS_TEMPLATE.format(
        text=config_data.InformationMessages_STEP_1[1],
    ),
]

# Step-2 banner text, same per-language layout as STEP_1.
STEP_2 = [
    STEPS_TEMPLATE.format(
        text=config_data.InformationMessages_STEP_2[0],
    ),
    STEPS_TEMPLATE.format(
        text=config_data.InformationMessages_STEP_2[1],
    ),
]
|
app/event_handlers/__init__.py
ADDED
File without changes
|
app/event_handlers/calculate_practical_tasks.py
ADDED
@@ -0,0 +1,649 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: calculate_practical_tasks.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: Event handler for Gradio app to calculate practical tasks.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
from app.oceanai_init import b5
|
9 |
+
import re
|
10 |
+
import gradio as gr
|
11 |
+
from pathlib import Path
|
12 |
+
|
13 |
+
# Importing necessary components for the Gradio app
|
14 |
+
from app.config import config_data
|
15 |
+
from app.video_metadata import video_metadata
|
16 |
+
from app.mbti_description import MBTI_DESCRIPTION, MBTI_DATA
|
17 |
+
from app.utils import (
|
18 |
+
read_csv_file,
|
19 |
+
apply_rounding_and_rename_columns,
|
20 |
+
preprocess_scores_df,
|
21 |
+
)
|
22 |
+
from app.components import (
|
23 |
+
html_message,
|
24 |
+
dataframe,
|
25 |
+
files_create_ui,
|
26 |
+
video_create_ui,
|
27 |
+
textbox_create_ui,
|
28 |
+
)
|
29 |
+
|
30 |
+
|
31 |
+
def colleague_type(subtask):
    """Classify a colleague-search subtask: 'minor' for junior, 'major' otherwise."""
    if "junior" in subtask.lower():
        return "minor"
    return "major"
|
33 |
+
|
34 |
+
|
35 |
+
def consumer_preferences(subtask):
    """Pick the consumer-preferences data file for the given subtask.

    NOTE(review): 'mobile device' mapping to the CAR characteristics file
    (and everything else to MDA categories) looks potentially swapped —
    preserved as-is; verify against the config/data files.
    """
    if "mobile device" in subtask.lower():
        return config_data.Filenames_CAR_CHARACTERISTICS
    return config_data.Filenames_MDA_CATEGORIES
|
41 |
+
|
42 |
+
|
43 |
+
def remove_parentheses(s):
    """Strip every parenthesised group (and any spaces before it) from *s*."""
    pattern = re.compile(r"\s*\([^)]*\)")
    return pattern.sub("", s)
|
45 |
+
|
46 |
+
|
47 |
+
def extract_text_in_parentheses(s):
    """Return the text inside the first (...) group in *s*, or None if absent."""
    match = re.search(r"\(([^)]+)\)", s)
    return match.group(1) if match else None
|
53 |
+
|
54 |
+
|
55 |
+
def compare_strings(original, comparison, prev=False):
    """Render *comparison* as HTML, colouring characters by match with *original*.

    Characters beyond the shorter string are dropped (zip semantics).
    With ``prev=False`` each character gets its own <span class='true'|'err'>;
    with ``prev=True`` consecutive characters of the same class share one span.
    """
    pieces = []
    open_class = None

    for expected, actual in zip(original, comparison):
        cls = "true" if expected == actual else "err"
        if not prev:
            pieces.append(f"<span class='{cls}'>{actual}</span>")
        else:
            if cls != open_class:
                if open_class:
                    pieces.append("</span>")
                pieces.append(f"<span class='{cls}'>")
                open_class = cls
            pieces.append(actual)

    if open_class:
        pieces.append("</span>")

    return f"<span class='wrapper_mbti'>{''.join(pieces)}</span>"
|
71 |
+
|
72 |
+
|
73 |
+
def create_person_metadata(person_id, files, video_metadata):
    """Build the Gradio update tuple for the selected person's metadata panel.

    Args:
        person_id: Zero-based index into *files* for the selected person.
        files: List of video file paths (one per person).
        video_metadata: Mapping of video file name -> list of metadata fields;
            indices 0..3 are used as first name, last name, email and phone
            number respectively (see the textbox labels below).

    Returns:
        A 15-element tuple of Gradio component updates (Column/Row/Image
        visibility toggles plus four textboxes). All components are shown
        when metadata exists for the file and metadata display is enabled
        in config; otherwise all are hidden. The tuple order must match the
        output component order wired up in the event handlers.
    """
    if (
        Path(files[person_id]).name in video_metadata
        and config_data.Settings_SHOW_VIDEO_METADATA
    ):
        person_metadata_list = video_metadata[Path(files[person_id]).name]
        # Positional textbox_create_ui args map to: value, type, label,
        # placeholder, info, max_lines, show_label, interactive, visible,
        # show_copy_button, scale, container — i.e. read-only visible boxes.
        return (
            gr.Column(visible=True),
            gr.Row(visible=True),
            gr.Row(visible=True),
            gr.Image(visible=True),
            textbox_create_ui(
                person_metadata_list[0],
                "text",
                "First name",
                None,
                None,
                1,
                True,
                False,
                True,
                False,
                1,
                False,
            ),
            gr.Row(visible=True),
            gr.Image(visible=True),
            textbox_create_ui(
                person_metadata_list[1],
                "text",
                "Last name",
                None,
                None,
                1,
                True,
                False,
                True,
                False,
                1,
                False,
            ),
            gr.Row(visible=True),
            gr.Row(visible=True),
            gr.Image(visible=True),
            textbox_create_ui(
                person_metadata_list[2],
                "email",
                "Email",
                None,
                None,
                1,
                True,
                False,
                True,
                False,
                1,
                False,
            ),
            gr.Row(visible=True),
            gr.Image(visible=True),
            textbox_create_ui(
                person_metadata_list[3],
                "text",
                "Phone number",
                None,
                None,
                1,
                True,
                False,
                True,
                False,
                1,
                False,
            ),
        )
    else:
        # No metadata for this file (or display disabled): hide the whole
        # panel, keeping the same 15-element tuple shape.
        return (
            gr.Column(visible=False),
            gr.Row(visible=False),
            gr.Row(visible=False),
            gr.Image(visible=False),
            textbox_create_ui(visible=False),
            gr.Row(visible=False),
            gr.Image(visible=False),
            textbox_create_ui(visible=False),
            gr.Row(visible=False),
            gr.Row(visible=False),
            gr.Image(visible=False),
            textbox_create_ui(visible=False),
            gr.Row(visible=False),
            gr.Image(visible=False),
            textbox_create_ui(visible=False),
        )
|
166 |
+
|
167 |
+
|
168 |
+
def event_handler_calculate_practical_task_blocks(
|
169 |
+
files,
|
170 |
+
practical_subtasks,
|
171 |
+
pt_scores,
|
172 |
+
dropdown_mbti,
|
173 |
+
threshold_mbti,
|
174 |
+
threshold_professional_skills,
|
175 |
+
dropdown_professional_skills,
|
176 |
+
target_score_ope,
|
177 |
+
target_score_con,
|
178 |
+
target_score_ext,
|
179 |
+
target_score_agr,
|
180 |
+
target_score_nneu,
|
181 |
+
equal_coefficient,
|
182 |
+
number_priority,
|
183 |
+
number_importance_traits,
|
184 |
+
threshold_consumer_preferences,
|
185 |
+
number_openness,
|
186 |
+
number_conscientiousness,
|
187 |
+
number_extraversion,
|
188 |
+
number_agreeableness,
|
189 |
+
number_non_neuroticism,
|
190 |
+
):
|
191 |
+
if practical_subtasks.lower() == "16 personality types of mbti":
|
192 |
+
df_correlation_coefficients = read_csv_file(config_data.Links_MBTI)
|
193 |
+
|
194 |
+
pt_scores_copy = pt_scores.iloc[:, 1:].copy()
|
195 |
+
|
196 |
+
preprocess_scores_df(pt_scores_copy, config_data.Dataframes_PT_SCORES[0][0])
|
197 |
+
|
198 |
+
b5._professional_match(
|
199 |
+
df_files=pt_scores_copy,
|
200 |
+
correlation_coefficients=df_correlation_coefficients,
|
201 |
+
personality_type=remove_parentheses(dropdown_mbti),
|
202 |
+
threshold=threshold_mbti,
|
203 |
+
out=False,
|
204 |
+
)
|
205 |
+
|
206 |
+
df = apply_rounding_and_rename_columns(b5.df_files_MBTI_job_match_)
|
207 |
+
|
208 |
+
df_hidden = df.drop(
|
209 |
+
columns=config_data.Settings_SHORT_PROFESSIONAL_SKILLS
|
210 |
+
+ config_data.Settings_DROPDOWN_MBTI_DEL_COLS
|
211 |
+
)
|
212 |
+
|
213 |
+
df_hidden.rename(
|
214 |
+
columns={
|
215 |
+
"Path": "Filename",
|
216 |
+
"MBTI": "Personality Type",
|
217 |
+
"MBTI_Score": "Personality Type Score",
|
218 |
+
},
|
219 |
+
inplace=True,
|
220 |
+
)
|
221 |
+
|
222 |
+
df_hidden.to_csv(config_data.Filenames_MBTI_JOB)
|
223 |
+
|
224 |
+
df_hidden.reset_index(inplace=True)
|
225 |
+
|
226 |
+
person_id = int(df_hidden.iloc[0][config_data.Dataframes_PT_SCORES[0][0]]) - 1
|
227 |
+
|
228 |
+
short_mbti = extract_text_in_parentheses(dropdown_mbti)
|
229 |
+
mbti_values = df_hidden["Personality Type"].tolist()
|
230 |
+
|
231 |
+
df_hidden["Personality Type"] = [
|
232 |
+
compare_strings(short_mbti, mbti, False) for mbti in mbti_values
|
233 |
+
]
|
234 |
+
|
235 |
+
person_metadata = create_person_metadata(person_id, files, video_metadata)
|
236 |
+
|
237 |
+
existing_tuple = (
|
238 |
+
gr.Row(visible=True),
|
239 |
+
gr.Column(visible=True),
|
240 |
+
dataframe(
|
241 |
+
headers=df_hidden.columns.tolist(),
|
242 |
+
values=df_hidden.values.tolist(),
|
243 |
+
visible=True,
|
244 |
+
),
|
245 |
+
files_create_ui(
|
246 |
+
config_data.Filenames_MBTI_JOB,
|
247 |
+
"single",
|
248 |
+
[".csv"],
|
249 |
+
config_data.OtherMessages_EXPORT_MBTI,
|
250 |
+
True,
|
251 |
+
False,
|
252 |
+
True,
|
253 |
+
"csv-container",
|
254 |
+
),
|
255 |
+
gr.Accordion(
|
256 |
+
label=config_data.Labels_NOTE_MBTI_LABEL,
|
257 |
+
open=False,
|
258 |
+
visible=True,
|
259 |
+
),
|
260 |
+
gr.HTML(value=MBTI_DESCRIPTION, visible=True),
|
261 |
+
dataframe(
|
262 |
+
headers=MBTI_DATA.columns.tolist(),
|
263 |
+
values=MBTI_DATA.values.tolist(),
|
264 |
+
visible=True,
|
265 |
+
elem_classes="mbti-dataframe",
|
266 |
+
),
|
267 |
+
gr.Column(visible=True),
|
268 |
+
video_create_ui(
|
269 |
+
value=files[person_id],
|
270 |
+
file_name=Path(files[person_id]).name,
|
271 |
+
label="Best Person ID - " + str(person_id + 1),
|
272 |
+
visible=True,
|
273 |
+
elem_classes="video-sorted-container",
|
274 |
+
),
|
275 |
+
html_message(config_data.InformationMessages_NOTI_IN_DEV, False, False),
|
276 |
+
)
|
277 |
+
|
278 |
+
return existing_tuple[:-1] + person_metadata + existing_tuple[-1:]
|
279 |
+
elif practical_subtasks.lower() == "professional groups":
|
280 |
+
sum_weights = sum(
|
281 |
+
[
|
282 |
+
number_openness,
|
283 |
+
number_conscientiousness,
|
284 |
+
number_extraversion,
|
285 |
+
number_agreeableness,
|
286 |
+
number_non_neuroticism,
|
287 |
+
]
|
288 |
+
)
|
289 |
+
|
290 |
+
if sum_weights != 100:
|
291 |
+
gr.Warning(config_data.InformationMessages_SUM_WEIGHTS.format(sum_weights))
|
292 |
+
|
293 |
+
return (
|
294 |
+
gr.Row(visible=False),
|
295 |
+
gr.Column(visible=False),
|
296 |
+
dataframe(visible=False),
|
297 |
+
files_create_ui(
|
298 |
+
None,
|
299 |
+
"single",
|
300 |
+
[".csv"],
|
301 |
+
config_data.OtherMessages_EXPORT_PS,
|
302 |
+
True,
|
303 |
+
False,
|
304 |
+
False,
|
305 |
+
"csv-container",
|
306 |
+
),
|
307 |
+
gr.Accordion(visible=False),
|
308 |
+
gr.HTML(visible=False),
|
309 |
+
dataframe(visible=False),
|
310 |
+
gr.Column(visible=False),
|
311 |
+
video_create_ui(visible=False),
|
312 |
+
gr.Column(visible=False),
|
313 |
+
gr.Row(visible=False),
|
314 |
+
gr.Row(visible=False),
|
315 |
+
gr.Image(visible=False),
|
316 |
+
textbox_create_ui(visible=False),
|
317 |
+
gr.Row(visible=False),
|
318 |
+
gr.Image(visible=False),
|
319 |
+
textbox_create_ui(visible=False),
|
320 |
+
gr.Row(visible=False),
|
321 |
+
gr.Row(visible=False),
|
322 |
+
gr.Image(visible=False),
|
323 |
+
textbox_create_ui(visible=False),
|
324 |
+
gr.Row(visible=False),
|
325 |
+
gr.Image(visible=False),
|
326 |
+
textbox_create_ui(visible=False),
|
327 |
+
html_message(
|
328 |
+
config_data.InformationMessages_SUM_WEIGHTS.format(sum_weights),
|
329 |
+
False,
|
330 |
+
True,
|
331 |
+
),
|
332 |
+
)
|
333 |
+
else:
|
334 |
+
b5._candidate_ranking(
|
335 |
+
df_files=pt_scores.iloc[:, 1:],
|
336 |
+
weigths_openness=number_openness,
|
337 |
+
weigths_conscientiousness=number_conscientiousness,
|
338 |
+
weigths_extraversion=number_extraversion,
|
339 |
+
weigths_agreeableness=number_agreeableness,
|
340 |
+
weigths_non_neuroticism=number_non_neuroticism,
|
341 |
+
out=False,
|
342 |
+
)
|
343 |
+
|
344 |
+
df = apply_rounding_and_rename_columns(b5.df_files_ranking_)
|
345 |
+
|
346 |
+
df_hidden = df.drop(columns=config_data.Settings_SHORT_PROFESSIONAL_SKILLS)
|
347 |
+
|
348 |
+
df_hidden.to_csv(config_data.Filenames_POTENTIAL_CANDIDATES)
|
349 |
+
|
350 |
+
df_hidden.reset_index(inplace=True)
|
351 |
+
|
352 |
+
person_id = (
|
353 |
+
int(df_hidden.iloc[0][config_data.Dataframes_PT_SCORES[0][0]]) - 1
|
354 |
+
)
|
355 |
+
|
356 |
+
person_metadata = create_person_metadata(person_id, files, video_metadata)
|
357 |
+
|
358 |
+
existing_tuple = (
|
359 |
+
gr.Row(visible=True),
|
360 |
+
gr.Column(visible=True),
|
361 |
+
dataframe(
|
362 |
+
headers=df_hidden.columns.tolist(),
|
363 |
+
values=df_hidden.values.tolist(),
|
364 |
+
visible=True,
|
365 |
+
),
|
366 |
+
files_create_ui(
|
367 |
+
config_data.Filenames_POTENTIAL_CANDIDATES,
|
368 |
+
"single",
|
369 |
+
[".csv"],
|
370 |
+
config_data.OtherMessages_EXPORT_PG,
|
371 |
+
True,
|
372 |
+
False,
|
373 |
+
True,
|
374 |
+
"csv-container",
|
375 |
+
),
|
376 |
+
gr.Accordion(visible=False),
|
377 |
+
gr.HTML(visible=False),
|
378 |
+
dataframe(visible=False),
|
379 |
+
gr.Column(visible=True),
|
380 |
+
video_create_ui(
|
381 |
+
value=files[person_id],
|
382 |
+
file_name=Path(files[person_id]).name,
|
383 |
+
label="Best Person ID - " + str(person_id + 1),
|
384 |
+
visible=True,
|
385 |
+
elem_classes="video-sorted-container",
|
386 |
+
),
|
387 |
+
html_message(config_data.InformationMessages_NOTI_IN_DEV, False, False),
|
388 |
+
)
|
389 |
+
|
390 |
+
return existing_tuple[:-1] + person_metadata + existing_tuple[-1:]
|
391 |
+
elif practical_subtasks.lower() == "professional skills":
|
392 |
+
df_professional_skills = read_csv_file(config_data.Links_PROFESSIONAL_SKILLS)
|
393 |
+
|
394 |
+
b5._priority_skill_calculation(
|
395 |
+
df_files=pt_scores.iloc[:, 1:],
|
396 |
+
correlation_coefficients=df_professional_skills,
|
397 |
+
threshold=threshold_professional_skills,
|
398 |
+
out=False,
|
399 |
+
)
|
400 |
+
|
401 |
+
df = apply_rounding_and_rename_columns(b5.df_files_priority_skill_)
|
402 |
+
|
403 |
+
professional_skills_list = (
|
404 |
+
config_data.Settings_DROPDOWN_PROFESSIONAL_SKILLS.copy()
|
405 |
+
)
|
406 |
+
|
407 |
+
professional_skills_list.remove(dropdown_professional_skills)
|
408 |
+
|
409 |
+
df_hidden = df.drop(
|
410 |
+
columns=config_data.Settings_SHORT_PROFESSIONAL_SKILLS
|
411 |
+
+ professional_skills_list
|
412 |
+
)
|
413 |
+
|
414 |
+
df_hidden.to_csv(config_data.Filenames_PT_SKILLS_SCORES)
|
415 |
+
|
416 |
+
df_hidden.reset_index(inplace=True)
|
417 |
+
|
418 |
+
df_hidden = df_hidden.sort_values(
|
419 |
+
by=[dropdown_professional_skills], ascending=False
|
420 |
+
)
|
421 |
+
|
422 |
+
person_id = int(df_hidden.iloc[0][config_data.Dataframes_PT_SCORES[0][0]]) - 1
|
423 |
+
|
424 |
+
person_metadata = create_person_metadata(person_id, files, video_metadata)
|
425 |
+
|
426 |
+
existing_tuple = (
|
427 |
+
gr.Row(visible=True),
|
428 |
+
gr.Column(visible=True),
|
429 |
+
dataframe(
|
430 |
+
headers=df_hidden.columns.tolist(),
|
431 |
+
values=df_hidden.values.tolist(),
|
432 |
+
visible=True,
|
433 |
+
),
|
434 |
+
files_create_ui(
|
435 |
+
config_data.Filenames_PT_SKILLS_SCORES,
|
436 |
+
"single",
|
437 |
+
[".csv"],
|
438 |
+
config_data.OtherMessages_EXPORT_PS,
|
439 |
+
True,
|
440 |
+
False,
|
441 |
+
True,
|
442 |
+
"csv-container",
|
443 |
+
),
|
444 |
+
gr.Accordion(visible=False),
|
445 |
+
gr.HTML(visible=False),
|
446 |
+
dataframe(visible=False),
|
447 |
+
gr.Column(visible=True),
|
448 |
+
video_create_ui(
|
449 |
+
value=files[person_id],
|
450 |
+
file_name=Path(files[person_id]).name,
|
451 |
+
label="Best Person ID - " + str(person_id + 1),
|
452 |
+
visible=True,
|
453 |
+
elem_classes="video-sorted-container",
|
454 |
+
),
|
455 |
+
html_message(config_data.InformationMessages_NOTI_IN_DEV, False, False),
|
456 |
+
)
|
457 |
+
|
458 |
+
return existing_tuple[:-1] + person_metadata + existing_tuple[-1:]
|
459 |
+
elif (
|
460 |
+
practical_subtasks.lower() == "finding a suitable junior colleague"
|
461 |
+
or practical_subtasks.lower() == "finding a suitable senior colleague"
|
462 |
+
):
|
463 |
+
df_correlation_coefficients = read_csv_file(
|
464 |
+
config_data.Links_FINDING_COLLEAGUE, ["ID"]
|
465 |
+
)
|
466 |
+
|
467 |
+
b5._colleague_ranking(
|
468 |
+
df_files=pt_scores.iloc[:, 1:],
|
469 |
+
correlation_coefficients=df_correlation_coefficients,
|
470 |
+
target_scores=[
|
471 |
+
target_score_ope,
|
472 |
+
target_score_con,
|
473 |
+
target_score_ext,
|
474 |
+
target_score_agr,
|
475 |
+
target_score_nneu,
|
476 |
+
],
|
477 |
+
colleague=colleague_type(practical_subtasks),
|
478 |
+
equal_coefficients=equal_coefficient,
|
479 |
+
out=False,
|
480 |
+
)
|
481 |
+
|
482 |
+
df = apply_rounding_and_rename_columns(b5.df_files_colleague_)
|
483 |
+
|
484 |
+
df_hidden = df.drop(columns=config_data.Settings_SHORT_PROFESSIONAL_SKILLS)
|
485 |
+
|
486 |
+
df_hidden.to_csv(
|
487 |
+
colleague_type(practical_subtasks) + config_data.Filenames_COLLEAGUE_RANKING
|
488 |
+
)
|
489 |
+
|
490 |
+
df_hidden.reset_index(inplace=True)
|
491 |
+
|
492 |
+
person_id = int(df_hidden.iloc[0][config_data.Dataframes_PT_SCORES[0][0]]) - 1
|
493 |
+
|
494 |
+
person_metadata = create_person_metadata(person_id, files, video_metadata)
|
495 |
+
|
496 |
+
existing_tuple = (
|
497 |
+
gr.Row(visible=True),
|
498 |
+
gr.Column(visible=True),
|
499 |
+
dataframe(
|
500 |
+
headers=df_hidden.columns.tolist(),
|
501 |
+
values=df_hidden.values.tolist(),
|
502 |
+
visible=True,
|
503 |
+
),
|
504 |
+
files_create_ui(
|
505 |
+
colleague_type(practical_subtasks)
|
506 |
+
+ config_data.Filenames_COLLEAGUE_RANKING,
|
507 |
+
"single",
|
508 |
+
[".csv"],
|
509 |
+
config_data.OtherMessages_EXPORT_WT,
|
510 |
+
True,
|
511 |
+
False,
|
512 |
+
True,
|
513 |
+
"csv-container",
|
514 |
+
),
|
515 |
+
gr.Accordion(visible=False),
|
516 |
+
gr.HTML(visible=False),
|
517 |
+
dataframe(visible=False),
|
518 |
+
gr.Column(visible=True),
|
519 |
+
video_create_ui(
|
520 |
+
value=files[person_id],
|
521 |
+
file_name=Path(files[person_id]).name,
|
522 |
+
label="Best Person ID - " + str(person_id + 1),
|
523 |
+
visible=True,
|
524 |
+
elem_classes="video-sorted-container",
|
525 |
+
),
|
526 |
+
html_message(config_data.InformationMessages_NOTI_IN_DEV, False, False),
|
527 |
+
)
|
528 |
+
|
529 |
+
return existing_tuple[:-1] + person_metadata + existing_tuple[-1:]
|
530 |
+
elif (
|
531 |
+
practical_subtasks.lower() == "car characteristics"
|
532 |
+
or practical_subtasks.lower() == "mobile device application categories"
|
533 |
+
or practical_subtasks.lower() == "clothing style correlation"
|
534 |
+
):
|
535 |
+
if practical_subtasks.lower() == "car characteristics":
|
536 |
+
df_correlation_coefficients = read_csv_file(
|
537 |
+
config_data.Links_CAR_CHARACTERISTICS,
|
538 |
+
["Style and performance", "Safety and practicality"],
|
539 |
+
)
|
540 |
+
elif practical_subtasks.lower() == "mobile device application categories":
|
541 |
+
df_correlation_coefficients = read_csv_file(
|
542 |
+
config_data.Links_MDA_CATEGORIES
|
543 |
+
)
|
544 |
+
elif practical_subtasks.lower() == "clothing style correlation":
|
545 |
+
df_correlation_coefficients = read_csv_file(config_data.Links_CLOTHING_SC)
|
546 |
+
|
547 |
+
pt_scores_copy = pt_scores.iloc[:, 1:].copy()
|
548 |
+
|
549 |
+
preprocess_scores_df(pt_scores_copy, config_data.Dataframes_PT_SCORES[0][0])
|
550 |
+
|
551 |
+
b5._priority_calculation(
|
552 |
+
df_files=pt_scores_copy,
|
553 |
+
correlation_coefficients=df_correlation_coefficients,
|
554 |
+
col_name_ocean="Trait",
|
555 |
+
threshold=threshold_consumer_preferences,
|
556 |
+
number_priority=number_priority,
|
557 |
+
number_importance_traits=number_importance_traits,
|
558 |
+
out=False,
|
559 |
+
)
|
560 |
+
|
561 |
+
df_files_priority = b5.df_files_priority_.copy()
|
562 |
+
df_files_priority.reset_index(inplace=True)
|
563 |
+
|
564 |
+
df = apply_rounding_and_rename_columns(df_files_priority.iloc[:, 1:])
|
565 |
+
|
566 |
+
preprocess_scores_df(df, config_data.Dataframes_PT_SCORES[0][0])
|
567 |
+
|
568 |
+
df_hidden = df.drop(columns=config_data.Settings_SHORT_PROFESSIONAL_SKILLS)
|
569 |
+
|
570 |
+
df_hidden.to_csv(consumer_preferences(practical_subtasks))
|
571 |
+
|
572 |
+
df_hidden.reset_index(inplace=True)
|
573 |
+
|
574 |
+
person_id = int(df_hidden.iloc[0][config_data.Dataframes_PT_SCORES[0][0]]) - 1
|
575 |
+
|
576 |
+
person_metadata = create_person_metadata(person_id, files, video_metadata)
|
577 |
+
|
578 |
+
existing_tuple = (
|
579 |
+
gr.Row(visible=True),
|
580 |
+
gr.Column(visible=True),
|
581 |
+
dataframe(
|
582 |
+
headers=df_hidden.columns.tolist(),
|
583 |
+
values=df_hidden.values.tolist(),
|
584 |
+
visible=True,
|
585 |
+
),
|
586 |
+
files_create_ui(
|
587 |
+
consumer_preferences(practical_subtasks),
|
588 |
+
"single",
|
589 |
+
[".csv"],
|
590 |
+
config_data.OtherMessages_EXPORT_CP,
|
591 |
+
True,
|
592 |
+
False,
|
593 |
+
True,
|
594 |
+
"csv-container",
|
595 |
+
),
|
596 |
+
gr.Accordion(visible=False),
|
597 |
+
gr.HTML(visible=False),
|
598 |
+
dataframe(visible=False),
|
599 |
+
gr.Column(visible=True),
|
600 |
+
video_create_ui(
|
601 |
+
value=files[person_id],
|
602 |
+
file_name=Path(files[person_id]).name,
|
603 |
+
label="Best Person ID - " + str(person_id + 1),
|
604 |
+
visible=True,
|
605 |
+
elem_classes="video-sorted-container",
|
606 |
+
),
|
607 |
+
html_message(config_data.InformationMessages_NOTI_IN_DEV, False, False),
|
608 |
+
)
|
609 |
+
|
610 |
+
return existing_tuple[:-1] + person_metadata + existing_tuple[-1:]
|
611 |
+
else:
|
612 |
+
gr.Info(config_data.InformationMessages_NOTI_IN_DEV)
|
613 |
+
|
614 |
+
return (
|
615 |
+
gr.Row(visible=False),
|
616 |
+
gr.Column(visible=False),
|
617 |
+
dataframe(visible=False),
|
618 |
+
files_create_ui(
|
619 |
+
None,
|
620 |
+
"single",
|
621 |
+
[".csv"],
|
622 |
+
config_data.OtherMessages_EXPORT_PS,
|
623 |
+
True,
|
624 |
+
False,
|
625 |
+
False,
|
626 |
+
"csv-container",
|
627 |
+
),
|
628 |
+
gr.Accordion(visible=False),
|
629 |
+
gr.HTML(visible=False),
|
630 |
+
dataframe(visible=False),
|
631 |
+
gr.Column(visible=False),
|
632 |
+
video_create_ui(visible=False),
|
633 |
+
gr.Column(visible=False),
|
634 |
+
gr.Row(visible=False),
|
635 |
+
gr.Row(visible=False),
|
636 |
+
gr.Image(visible=False),
|
637 |
+
textbox_create_ui(visible=False),
|
638 |
+
gr.Row(visible=False),
|
639 |
+
gr.Image(visible=False),
|
640 |
+
textbox_create_ui(visible=False),
|
641 |
+
gr.Row(visible=False),
|
642 |
+
gr.Row(visible=False),
|
643 |
+
gr.Image(visible=False),
|
644 |
+
textbox_create_ui(visible=False),
|
645 |
+
gr.Row(visible=False),
|
646 |
+
gr.Image(visible=False),
|
647 |
+
textbox_create_ui(visible=False),
|
648 |
+
html_message(config_data.InformationMessages_NOTI_IN_DEV, False, True),
|
649 |
+
)
|
app/event_handlers/calculate_pt_scores_blocks.py
ADDED
@@ -0,0 +1,280 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: clear_blocks.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: Event handler for clearing Gradio app blocks and components.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
import spaces
|
9 |
+
import gradio as gr
|
10 |
+
|
11 |
+
# Importing necessary components for the Gradio app
|
12 |
+
from app.oceanai_init import b5
|
13 |
+
from app.config import config_data
|
14 |
+
from app.description_steps import STEP_2
|
15 |
+
from app.utils import get_language_settings
|
16 |
+
from app.practical_tasks import supported_practical_tasks
|
17 |
+
from app.components import (
|
18 |
+
html_message,
|
19 |
+
button,
|
20 |
+
dataframe,
|
21 |
+
files_create_ui,
|
22 |
+
radio_create_ui,
|
23 |
+
number_create_ui,
|
24 |
+
dropdown_create_ui,
|
25 |
+
video_create_ui,
|
26 |
+
textbox_create_ui,
|
27 |
+
)
|
28 |
+
|
29 |
+
|
30 |
+
@spaces.GPU
|
31 |
+
def event_handler_calculate_pt_scores_blocks(language, files, evt_data: gr.EventData):
|
32 |
+
_ = evt_data.target.__class__.__name__
|
33 |
+
|
34 |
+
lang_id, _ = get_language_settings(language)
|
35 |
+
|
36 |
+
out = False
|
37 |
+
b5.get_avt_predictions_gradio(
|
38 |
+
paths=files, url_accuracy="", accuracy=False, lang="en", out=out
|
39 |
+
)
|
40 |
+
|
41 |
+
first_practical_task = next(iter(supported_practical_tasks))
|
42 |
+
|
43 |
+
if len(b5.df_files_) == 0:
|
44 |
+
gr.Warning(config_data.OtherMessages_CALCULATE_PT_SCORES_ERR)
|
45 |
+
|
46 |
+
return (
|
47 |
+
html_message(config_data.OtherMessages_CALCULATE_PT_SCORES_ERR, False),
|
48 |
+
dataframe(visible=False),
|
49 |
+
files_create_ui(
|
50 |
+
None,
|
51 |
+
"single",
|
52 |
+
[".csv"],
|
53 |
+
config_data.OtherMessages_EXPORT_PT_SCORES[lang_id],
|
54 |
+
True,
|
55 |
+
False,
|
56 |
+
False,
|
57 |
+
"csv-container",
|
58 |
+
),
|
59 |
+
gr.HTML(value=STEP_2[lang_id], visible=False),
|
60 |
+
gr.Column(visible=False),
|
61 |
+
radio_create_ui(
|
62 |
+
first_practical_task,
|
63 |
+
config_data.Labels_PRACTICAL_TASKS_LABEL,
|
64 |
+
list(map(str, supported_practical_tasks.keys())),
|
65 |
+
config_data.InformationMessages_PRACTICAL_TASKS_INFO,
|
66 |
+
True,
|
67 |
+
True,
|
68 |
+
),
|
69 |
+
radio_create_ui(
|
70 |
+
supported_practical_tasks[first_practical_task][0],
|
71 |
+
config_data.Labels_PRACTICAL_SUBTASKS_LABEL,
|
72 |
+
supported_practical_tasks[first_practical_task],
|
73 |
+
config_data.InformationMessages_PRACTICAL_SUBTASKS_INFO,
|
74 |
+
True,
|
75 |
+
True,
|
76 |
+
),
|
77 |
+
gr.JSON(
|
78 |
+
value={
|
79 |
+
str(task): supported_practical_tasks.get(task, [None])[0]
|
80 |
+
for task in supported_practical_tasks.keys()
|
81 |
+
},
|
82 |
+
visible=False,
|
83 |
+
render=True,
|
84 |
+
),
|
85 |
+
gr.Column(visible=False),
|
86 |
+
dropdown_create_ui(visible=False),
|
87 |
+
number_create_ui(visible=False),
|
88 |
+
number_create_ui(visible=False),
|
89 |
+
dropdown_create_ui(visible=False),
|
90 |
+
number_create_ui(visible=False),
|
91 |
+
number_create_ui(visible=False),
|
92 |
+
number_create_ui(visible=False),
|
93 |
+
number_create_ui(visible=False),
|
94 |
+
number_create_ui(visible=False),
|
95 |
+
number_create_ui(visible=False),
|
96 |
+
number_create_ui(visible=False),
|
97 |
+
number_create_ui(visible=False),
|
98 |
+
number_create_ui(visible=False),
|
99 |
+
dropdown_create_ui(visible=False),
|
100 |
+
number_create_ui(visible=False),
|
101 |
+
number_create_ui(visible=False),
|
102 |
+
number_create_ui(visible=False),
|
103 |
+
number_create_ui(visible=False),
|
104 |
+
number_create_ui(visible=False),
|
105 |
+
button(
|
106 |
+
config_data.OtherMessages_CALCULATE_PRACTICAL_TASK,
|
107 |
+
True,
|
108 |
+
1,
|
109 |
+
"./images/pt.ico",
|
110 |
+
False,
|
111 |
+
"calculate_practical_task",
|
112 |
+
),
|
113 |
+
gr.Row(visible=False),
|
114 |
+
gr.Column(visible=False),
|
115 |
+
dataframe(visible=False),
|
116 |
+
files_create_ui(
|
117 |
+
None,
|
118 |
+
"single",
|
119 |
+
[".csv"],
|
120 |
+
config_data.OtherMessages_EXPORT_PS,
|
121 |
+
True,
|
122 |
+
False,
|
123 |
+
False,
|
124 |
+
"csv-container",
|
125 |
+
),
|
126 |
+
gr.Accordion(visible=False),
|
127 |
+
gr.HTML(visible=False),
|
128 |
+
dataframe(visible=False),
|
129 |
+
gr.Column(visible=False),
|
130 |
+
video_create_ui(visible=False),
|
131 |
+
gr.Column(visible=False),
|
132 |
+
gr.Row(visible=False),
|
133 |
+
gr.Row(visible=False),
|
134 |
+
gr.Image(visible=False),
|
135 |
+
textbox_create_ui(visible=False),
|
136 |
+
gr.Row(visible=False),
|
137 |
+
gr.Image(visible=False),
|
138 |
+
textbox_create_ui(visible=False),
|
139 |
+
gr.Row(visible=False),
|
140 |
+
gr.Row(visible=False),
|
141 |
+
gr.Image(visible=False),
|
142 |
+
textbox_create_ui(visible=False),
|
143 |
+
gr.Row(visible=False),
|
144 |
+
gr.Image(visible=False),
|
145 |
+
textbox_create_ui(visible=False),
|
146 |
+
html_message(config_data.InformationMessages_NOTI_IN_DEV, False, False),
|
147 |
+
)
|
148 |
+
|
149 |
+
b5.df_files_.to_csv(config_data.Filenames_PT_SCORES)
|
150 |
+
|
151 |
+
df_files = b5.df_files_.copy()
|
152 |
+
df_files.reset_index(inplace=True)
|
153 |
+
|
154 |
+
return (
|
155 |
+
html_message(
|
156 |
+
config_data.InformationMessages_NOTI_VIDEOS[lang_id], False, False
|
157 |
+
),
|
158 |
+
dataframe(
|
159 |
+
headers=(config_data.Dataframes_PT_SCORES[lang_id]),
|
160 |
+
values=df_files.values.tolist(),
|
161 |
+
visible=True,
|
162 |
+
),
|
163 |
+
files_create_ui(
|
164 |
+
config_data.Filenames_PT_SCORES,
|
165 |
+
"single",
|
166 |
+
[".csv"],
|
167 |
+
config_data.OtherMessages_EXPORT_PT_SCORES[lang_id],
|
168 |
+
True,
|
169 |
+
False,
|
170 |
+
True,
|
171 |
+
"csv-container",
|
172 |
+
),
|
173 |
+
gr.HTML(value=STEP_2[lang_id], visible=True),
|
174 |
+
gr.Column(visible=True),
|
175 |
+
radio_create_ui(
|
176 |
+
first_practical_task,
|
177 |
+
"Practical tasks",
|
178 |
+
list(map(str, supported_practical_tasks.keys())),
|
179 |
+
config_data.InformationMessages_PRACTICAL_TASKS_INFO,
|
180 |
+
True,
|
181 |
+
True,
|
182 |
+
),
|
183 |
+
radio_create_ui(
|
184 |
+
supported_practical_tasks[first_practical_task][0],
|
185 |
+
"Practical subtasks",
|
186 |
+
supported_practical_tasks[first_practical_task],
|
187 |
+
config_data.InformationMessages_PRACTICAL_SUBTASKS_INFO,
|
188 |
+
True,
|
189 |
+
True,
|
190 |
+
),
|
191 |
+
gr.JSON(
|
192 |
+
value={
|
193 |
+
str(task): supported_practical_tasks.get(task, [None])[0]
|
194 |
+
for task in supported_practical_tasks.keys()
|
195 |
+
},
|
196 |
+
visible=False,
|
197 |
+
render=True,
|
198 |
+
),
|
199 |
+
gr.Column(visible=True),
|
200 |
+
dropdown_create_ui(
|
201 |
+
label=f"Potential candidates by Personality Type of MBTI ({len(config_data.Settings_DROPDOWN_MBTI)})",
|
202 |
+
info=config_data.InformationMessages_DROPDOWN_MBTI_INFO,
|
203 |
+
choices=config_data.Settings_DROPDOWN_MBTI,
|
204 |
+
value=config_data.Settings_DROPDOWN_MBTI[0],
|
205 |
+
visible=True,
|
206 |
+
elem_classes="dropdown-container",
|
207 |
+
),
|
208 |
+
number_create_ui(
|
209 |
+
value=0.5,
|
210 |
+
minimum=0.0,
|
211 |
+
maximum=1.0,
|
212 |
+
step=0.01,
|
213 |
+
label=config_data.Labels_THRESHOLD_MBTI_LABEL,
|
214 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(0, 1.0),
|
215 |
+
show_label=True,
|
216 |
+
interactive=True,
|
217 |
+
visible=True,
|
218 |
+
render=True,
|
219 |
+
elem_classes="number-container",
|
220 |
+
),
|
221 |
+
number_create_ui(visible=False),
|
222 |
+
dropdown_create_ui(visible=False),
|
223 |
+
number_create_ui(visible=False),
|
224 |
+
number_create_ui(visible=False),
|
225 |
+
number_create_ui(visible=False),
|
226 |
+
number_create_ui(visible=False),
|
227 |
+
number_create_ui(visible=False),
|
228 |
+
number_create_ui(visible=False),
|
229 |
+
number_create_ui(visible=False),
|
230 |
+
number_create_ui(visible=False),
|
231 |
+
number_create_ui(visible=False),
|
232 |
+
dropdown_create_ui(visible=False),
|
233 |
+
number_create_ui(visible=False),
|
234 |
+
number_create_ui(visible=False),
|
235 |
+
number_create_ui(visible=False),
|
236 |
+
number_create_ui(visible=False),
|
237 |
+
number_create_ui(visible=False),
|
238 |
+
button(
|
239 |
+
config_data.OtherMessages_CALCULATE_PRACTICAL_TASK,
|
240 |
+
True,
|
241 |
+
1,
|
242 |
+
"./images/pt.ico",
|
243 |
+
True,
|
244 |
+
"calculate_practical_task",
|
245 |
+
),
|
246 |
+
gr.Row(visible=False),
|
247 |
+
gr.Column(visible=False),
|
248 |
+
dataframe(visible=False),
|
249 |
+
files_create_ui(
|
250 |
+
None,
|
251 |
+
"single",
|
252 |
+
[".csv"],
|
253 |
+
config_data.OtherMessages_EXPORT_PS,
|
254 |
+
True,
|
255 |
+
False,
|
256 |
+
False,
|
257 |
+
"csv-container",
|
258 |
+
),
|
259 |
+
gr.Accordion(visible=False),
|
260 |
+
gr.HTML(visible=False),
|
261 |
+
dataframe(visible=False),
|
262 |
+
gr.Column(visible=False),
|
263 |
+
video_create_ui(visible=False),
|
264 |
+
gr.Column(visible=False),
|
265 |
+
gr.Row(visible=False),
|
266 |
+
gr.Row(visible=False),
|
267 |
+
gr.Image(visible=False),
|
268 |
+
textbox_create_ui(visible=False),
|
269 |
+
gr.Row(visible=False),
|
270 |
+
gr.Image(visible=False),
|
271 |
+
textbox_create_ui(visible=False),
|
272 |
+
gr.Row(visible=False),
|
273 |
+
gr.Row(visible=False),
|
274 |
+
gr.Image(visible=False),
|
275 |
+
textbox_create_ui(visible=False),
|
276 |
+
gr.Row(visible=False),
|
277 |
+
gr.Image(visible=False),
|
278 |
+
textbox_create_ui(visible=False),
|
279 |
+
html_message(config_data.InformationMessages_NOTI_IN_DEV, False, False),
|
280 |
+
)
|
app/event_handlers/clear_blocks.py
ADDED
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: clear_blocks.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: Event handler for clearing Gradio app blocks and components.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
import gradio as gr
|
9 |
+
|
10 |
+
# Importing necessary components for the Gradio app
|
11 |
+
from app.config import config_data
|
12 |
+
from app.description_steps import STEP_2
|
13 |
+
from app.practical_tasks import supported_practical_tasks
|
14 |
+
from app.components import (
|
15 |
+
html_message,
|
16 |
+
files_create_ui,
|
17 |
+
video_create_ui,
|
18 |
+
button,
|
19 |
+
dataframe,
|
20 |
+
radio_create_ui,
|
21 |
+
number_create_ui,
|
22 |
+
dropdown_create_ui,
|
23 |
+
textbox_create_ui,
|
24 |
+
)
|
25 |
+
from app.utils import get_language_settings
|
26 |
+
|
27 |
+
|
28 |
+
def event_handler_clear_blocks(language):
|
29 |
+
lang_id, _ = get_language_settings(language)
|
30 |
+
|
31 |
+
first_practical_task = next(iter(supported_practical_tasks))
|
32 |
+
|
33 |
+
return (
|
34 |
+
html_message(config_data.InformationMessages_NOTI_VIDEOS[lang_id], False),
|
35 |
+
files_create_ui(
|
36 |
+
label="{} ({})".format(
|
37 |
+
config_data.OtherMessages_VIDEO_FILES[lang_id],
|
38 |
+
", ".join(config_data.Settings_SUPPORTED_VIDEO_EXT),
|
39 |
+
),
|
40 |
+
file_types=[f".{ext}" for ext in config_data.Settings_SUPPORTED_VIDEO_EXT],
|
41 |
+
),
|
42 |
+
video_create_ui(),
|
43 |
+
button(
|
44 |
+
config_data.OtherMessages_CALCULATE_PT_SCORES[lang_id],
|
45 |
+
False,
|
46 |
+
3,
|
47 |
+
"./images/calculate_pt_scores.ico",
|
48 |
+
True,
|
49 |
+
"calculate_oceanai",
|
50 |
+
),
|
51 |
+
button(
|
52 |
+
config_data.OtherMessages_CLEAR_APP[lang_id],
|
53 |
+
False,
|
54 |
+
1,
|
55 |
+
"./images/clear.ico",
|
56 |
+
True,
|
57 |
+
"clear_oceanai",
|
58 |
+
),
|
59 |
+
dataframe(visible=False),
|
60 |
+
files_create_ui(
|
61 |
+
None,
|
62 |
+
"single",
|
63 |
+
[".csv"],
|
64 |
+
config_data.OtherMessages_EXPORT_PT_SCORES[lang_id],
|
65 |
+
True,
|
66 |
+
False,
|
67 |
+
False,
|
68 |
+
"csv-container",
|
69 |
+
),
|
70 |
+
gr.HTML(value=STEP_2[lang_id], visible=False),
|
71 |
+
gr.Column(visible=False),
|
72 |
+
radio_create_ui(
|
73 |
+
first_practical_task,
|
74 |
+
"Practical tasks",
|
75 |
+
list(map(str, supported_practical_tasks.keys())),
|
76 |
+
config_data.InformationMessages_PRACTICAL_TASKS_INFO,
|
77 |
+
True,
|
78 |
+
True,
|
79 |
+
),
|
80 |
+
radio_create_ui(
|
81 |
+
supported_practical_tasks[first_practical_task][0],
|
82 |
+
"Practical subtasks",
|
83 |
+
supported_practical_tasks[first_practical_task],
|
84 |
+
config_data.InformationMessages_PRACTICAL_SUBTASKS_INFO,
|
85 |
+
True,
|
86 |
+
True,
|
87 |
+
),
|
88 |
+
gr.JSON(
|
89 |
+
value={
|
90 |
+
str(task): supported_practical_tasks.get(task, [None])[0]
|
91 |
+
for task in supported_practical_tasks.keys()
|
92 |
+
},
|
93 |
+
visible=False,
|
94 |
+
render=True,
|
95 |
+
),
|
96 |
+
gr.Column(visible=False),
|
97 |
+
dropdown_create_ui(visible=False),
|
98 |
+
number_create_ui(visible=False),
|
99 |
+
number_create_ui(visible=False),
|
100 |
+
dropdown_create_ui(visible=False),
|
101 |
+
number_create_ui(visible=False),
|
102 |
+
number_create_ui(visible=False),
|
103 |
+
number_create_ui(visible=False),
|
104 |
+
number_create_ui(visible=False),
|
105 |
+
number_create_ui(visible=False),
|
106 |
+
number_create_ui(visible=False),
|
107 |
+
number_create_ui(visible=False),
|
108 |
+
number_create_ui(visible=False),
|
109 |
+
number_create_ui(visible=False),
|
110 |
+
dropdown_create_ui(visible=False),
|
111 |
+
number_create_ui(visible=False),
|
112 |
+
number_create_ui(visible=False),
|
113 |
+
number_create_ui(visible=False),
|
114 |
+
number_create_ui(visible=False),
|
115 |
+
number_create_ui(visible=False),
|
116 |
+
gr.Row(visible=False),
|
117 |
+
gr.Column(visible=False),
|
118 |
+
dataframe(visible=False),
|
119 |
+
files_create_ui(
|
120 |
+
None,
|
121 |
+
"single",
|
122 |
+
[".csv"],
|
123 |
+
config_data.OtherMessages_EXPORT_PS,
|
124 |
+
True,
|
125 |
+
False,
|
126 |
+
False,
|
127 |
+
"csv-container",
|
128 |
+
),
|
129 |
+
gr.Accordion(visible=False),
|
130 |
+
gr.HTML(visible=False),
|
131 |
+
dataframe(visible=False),
|
132 |
+
gr.Column(visible=False),
|
133 |
+
video_create_ui(visible=False),
|
134 |
+
gr.Column(visible=False),
|
135 |
+
gr.Row(visible=False),
|
136 |
+
gr.Row(visible=False),
|
137 |
+
gr.Image(visible=False),
|
138 |
+
textbox_create_ui(visible=False),
|
139 |
+
gr.Row(visible=False),
|
140 |
+
gr.Image(visible=False),
|
141 |
+
textbox_create_ui(visible=False),
|
142 |
+
gr.Row(visible=False),
|
143 |
+
gr.Row(visible=False),
|
144 |
+
gr.Image(visible=False),
|
145 |
+
textbox_create_ui(visible=False),
|
146 |
+
gr.Row(visible=False),
|
147 |
+
gr.Image(visible=False),
|
148 |
+
textbox_create_ui(visible=False),
|
149 |
+
html_message(config_data.InformationMessages_NOTI_IN_DEV, False, False),
|
150 |
+
)
|
app/event_handlers/dropdown_candidates.py
ADDED
@@ -0,0 +1,109 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: dropdown_candidates.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: Event handler for Gradio app to filter dropdown candidates based on selected dropdown candidates.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
# Importing necessary components for the Gradio app
|
9 |
+
from app.config import config_data
|
10 |
+
from app.utils import read_csv_file, extract_profession_weights
|
11 |
+
from app.components import number_create_ui, dropdown_create_ui
|
12 |
+
|
13 |
+
|
14 |
+
def event_handler_dropdown_candidates(practical_subtasks, dropdown_candidates):
|
15 |
+
if practical_subtasks.lower() == "professional groups":
|
16 |
+
df_traits_priority_for_professions = read_csv_file(
|
17 |
+
config_data.Links_PROFESSIONS
|
18 |
+
)
|
19 |
+
|
20 |
+
weights, interactive = extract_profession_weights(
|
21 |
+
df_traits_priority_for_professions,
|
22 |
+
dropdown_candidates,
|
23 |
+
)
|
24 |
+
|
25 |
+
return (
|
26 |
+
number_create_ui(
|
27 |
+
value=weights[0],
|
28 |
+
minimum=config_data.Values_0_100[0],
|
29 |
+
maximum=config_data.Values_0_100[1],
|
30 |
+
step=1,
|
31 |
+
label=config_data.Labels_NUMBER_IMPORTANCE_OPE_LABEL,
|
32 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(
|
33 |
+
config_data.Values_0_100[0], config_data.Values_0_100[1]
|
34 |
+
),
|
35 |
+
show_label=True,
|
36 |
+
interactive=interactive,
|
37 |
+
visible=True,
|
38 |
+
render=True,
|
39 |
+
elem_classes="number-container",
|
40 |
+
),
|
41 |
+
number_create_ui(
|
42 |
+
value=weights[1],
|
43 |
+
minimum=config_data.Values_0_100[0],
|
44 |
+
maximum=config_data.Values_0_100[1],
|
45 |
+
step=1,
|
46 |
+
label=config_data.Labels_NUMBER_IMPORTANCE_CON_LABEL,
|
47 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(
|
48 |
+
config_data.Values_0_100[0], config_data.Values_0_100[1]
|
49 |
+
),
|
50 |
+
show_label=True,
|
51 |
+
interactive=interactive,
|
52 |
+
visible=True,
|
53 |
+
render=True,
|
54 |
+
elem_classes="number-container",
|
55 |
+
),
|
56 |
+
number_create_ui(
|
57 |
+
value=weights[2],
|
58 |
+
minimum=config_data.Values_0_100[0],
|
59 |
+
maximum=config_data.Values_0_100[1],
|
60 |
+
step=1,
|
61 |
+
label=config_data.Labels_NUMBER_IMPORTANCE_EXT_LABEL,
|
62 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(
|
63 |
+
config_data.Values_0_100[0], config_data.Values_0_100[1]
|
64 |
+
),
|
65 |
+
show_label=True,
|
66 |
+
interactive=interactive,
|
67 |
+
visible=True,
|
68 |
+
render=True,
|
69 |
+
elem_classes="number-container",
|
70 |
+
),
|
71 |
+
number_create_ui(
|
72 |
+
value=weights[3],
|
73 |
+
minimum=config_data.Values_0_100[0],
|
74 |
+
maximum=config_data.Values_0_100[1],
|
75 |
+
step=1,
|
76 |
+
label=config_data.Labels_NUMBER_IMPORTANCE_AGR_LABEL,
|
77 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(
|
78 |
+
config_data.Values_0_100[0], config_data.Values_0_100[1]
|
79 |
+
),
|
80 |
+
show_label=True,
|
81 |
+
interactive=interactive,
|
82 |
+
visible=True,
|
83 |
+
render=True,
|
84 |
+
elem_classes="number-container",
|
85 |
+
),
|
86 |
+
number_create_ui(
|
87 |
+
value=weights[4],
|
88 |
+
minimum=config_data.Values_0_100[0],
|
89 |
+
maximum=config_data.Values_0_100[1],
|
90 |
+
step=1,
|
91 |
+
label=config_data.Labels_NUMBER_IMPORTANCE_NNEU_LABEL,
|
92 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(
|
93 |
+
config_data.Values_0_100[0], config_data.Values_0_100[1]
|
94 |
+
),
|
95 |
+
show_label=True,
|
96 |
+
interactive=interactive,
|
97 |
+
visible=True,
|
98 |
+
render=True,
|
99 |
+
elem_classes="number-container",
|
100 |
+
),
|
101 |
+
)
|
102 |
+
else:
|
103 |
+
return (
|
104 |
+
number_create_ui(visible=False),
|
105 |
+
number_create_ui(visible=False),
|
106 |
+
number_create_ui(visible=False),
|
107 |
+
number_create_ui(visible=False),
|
108 |
+
number_create_ui(visible=False),
|
109 |
+
)
|
app/event_handlers/event_handlers.py
ADDED
@@ -0,0 +1,404 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: event_handlers.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: File containing functions for configuring event handlers for Gradio components.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
import gradio as gr
|
9 |
+
|
10 |
+
# Importing necessary components for the Gradio app
|
11 |
+
from app.event_handlers.languages import event_handler_languages
|
12 |
+
from app.event_handlers.files import (
|
13 |
+
event_handler_files,
|
14 |
+
event_handler_files_select,
|
15 |
+
event_handler_files_delete,
|
16 |
+
)
|
17 |
+
from app.event_handlers.examples_blocks import event_handler_examples_blocks
|
18 |
+
from app.event_handlers.clear_blocks import event_handler_clear_blocks
|
19 |
+
from app.event_handlers.calculate_pt_scores_blocks import (
|
20 |
+
event_handler_calculate_pt_scores_blocks,
|
21 |
+
)
|
22 |
+
from app.event_handlers.practical_tasks import event_handler_practical_tasks
|
23 |
+
from app.event_handlers.practical_subtasks import event_handler_practical_subtasks
|
24 |
+
from app.event_handlers.dropdown_candidates import event_handler_dropdown_candidates
|
25 |
+
from app.event_handlers.calculate_practical_tasks import (
|
26 |
+
event_handler_calculate_practical_task_blocks,
|
27 |
+
)
|
28 |
+
from app.event_handlers.practical_task_sorted import event_handler_practical_task_sorted
|
29 |
+
|
30 |
+
|
31 |
+
def setup_app_event_handlers(
    description,
    step_1,
    notifications,
    files,
    video,
    examples,
    calculate_pt_scores,
    clear_app,
    pt_scores,
    csv_pt_scores,
    step_2,
    practical_tasks,
    practical_subtasks,
    settings_practical_tasks,
    dropdown_mbti,
    threshold_mbti,
    threshold_professional_skills,
    dropdown_professional_skills,
    target_score_ope,
    target_score_con,
    target_score_ext,
    target_score_agr,
    target_score_nneu,
    equal_coefficient,
    number_priority,
    number_importance_traits,
    threshold_consumer_preferences,
    dropdown_candidates,
    number_openness,
    number_conscientiousness,
    number_extraversion,
    number_agreeableness,
    number_non_neuroticism,
    calculate_practical_task,
    practical_subtasks_selected,
    practical_tasks_column,
    sorted_videos,
    sorted_videos_column,
    practical_task_sorted,
    csv_practical_task_sorted,
    mbti_accordion,
    mbti_description,
    mbti_description_data,
    video_sorted_column,
    video_sorted,
    metadata,
    metadata_1,
    name_row,
    name_logo,
    name,
    surname_row,
    surname_logo,
    surname,
    metadata_2,
    email_row,
    email_logo,
    email,
    phone_row,
    phone_logo,
    phone,
    in_development,
    tab1,
    tab2,
    tab3,
    tab4,
    languages_row,
    country_flags,
    languages,
):
    """Wire every Gradio component of the app to its event handler.

    All arguments are Gradio components created elsewhere (see ``app/tabs.py``
    and ``app/components.py``); this function only registers listeners on
    them and returns nothing.

    IMPORTANT: the order of items inside each ``inputs=[...]`` /
    ``outputs=[...]`` list must match the positional parameters / return
    tuple of the corresponding handler exactly — do not reorder.

    NOTE(review): ``languages_row`` is accepted but never used below —
    presumably kept for call-site compatibility; confirm before removing.
    """
    # --- Language switch: rebuilds almost the whole UI in the new language.
    languages.select(
        fn=event_handler_languages,
        inputs=[languages, files, video, pt_scores, csv_pt_scores],
        outputs=[
            description,
            step_1,
            country_flags,
            languages,
            tab1,
            tab2,
            tab3,
            tab4,
            files,
            video,
            examples,
            calculate_pt_scores,
            clear_app,
            notifications,
            pt_scores,
            csv_pt_scores,
            step_2,
        ],
        queue=True,
    )
    # --- Upload list changed: refresh player, notification and action buttons.
    files.change(
        event_handler_files,
        [languages, files, video, pt_scores],
        [notifications, video, calculate_pt_scores, clear_app],
        queue=True,
    )
    # --- A file was clicked in the upload list: show it in the player.
    files.select(
        event_handler_files_select,
        [languages, files],
        [video],
        queue=True,
    )
    # --- A file was removed from the upload list: keep the player valid.
    files.delete(
        event_handler_files_delete,
        [languages, files, video],
        [video],
        queue=True,
    )
    # --- "Calculate personality-trait scores" button: runs the OCEAN-AI
    #     pipeline and reveals the step-2 (practical tasks) UI.
    gr.on(
        triggers=[calculate_pt_scores.click],
        fn=event_handler_calculate_pt_scores_blocks,
        inputs=[
            languages,
            files,
        ],
        outputs=[
            notifications,
            pt_scores,
            csv_pt_scores,
            step_2,
            practical_tasks_column,
            practical_tasks,
            practical_subtasks,
            practical_subtasks_selected,
            settings_practical_tasks,
            dropdown_mbti,
            threshold_mbti,
            threshold_professional_skills,
            dropdown_professional_skills,
            target_score_ope,
            target_score_con,
            target_score_ext,
            target_score_agr,
            target_score_nneu,
            equal_coefficient,
            number_priority,
            number_importance_traits,
            threshold_consumer_preferences,
            dropdown_candidates,
            number_openness,
            number_conscientiousness,
            number_extraversion,
            number_agreeableness,
            number_non_neuroticism,
            calculate_practical_task,
            sorted_videos,
            sorted_videos_column,
            practical_task_sorted,
            csv_practical_task_sorted,
            mbti_accordion,
            mbti_description,
            mbti_description_data,
            video_sorted_column,
            video_sorted,
            metadata,
            metadata_1,
            name_row,
            name_logo,
            name,
            surname_row,
            surname_logo,
            surname,
            metadata_2,
            email_row,
            email_logo,
            email,
            phone_row,
            phone_logo,
            phone,
            in_development,
        ],
        queue=True,
    )
    # --- "Examples" button: loads the bundled sample videos into the list.
    examples.click(
        fn=event_handler_examples_blocks,
        inputs=[],
        outputs=[
            files,
        ],
        queue=True,
    )
    # --- "Clear" button: resets the entire application state.
    clear_app.click(
        fn=event_handler_clear_blocks,
        inputs=[languages],
        outputs=[
            notifications,
            files,
            video,
            calculate_pt_scores,
            clear_app,
            pt_scores,
            csv_pt_scores,
            step_2,
            practical_tasks_column,
            practical_tasks,
            practical_subtasks,
            practical_subtasks_selected,
            settings_practical_tasks,
            dropdown_mbti,
            threshold_mbti,
            threshold_professional_skills,
            dropdown_professional_skills,
            target_score_ope,
            target_score_con,
            target_score_ext,
            target_score_agr,
            target_score_nneu,
            equal_coefficient,
            number_priority,
            number_importance_traits,
            threshold_consumer_preferences,
            dropdown_candidates,
            number_openness,
            number_conscientiousness,
            number_extraversion,
            number_agreeableness,
            number_non_neuroticism,
            sorted_videos,
            sorted_videos_column,
            practical_task_sorted,
            csv_practical_task_sorted,
            mbti_accordion,
            mbti_description,
            mbti_description_data,
            video_sorted_column,
            video_sorted,
            metadata,
            metadata_1,
            name_row,
            name_logo,
            name,
            surname_row,
            surname_logo,
            surname,
            metadata_2,
            email_row,
            email_logo,
            email,
            phone_row,
            phone_logo,
            phone,
            in_development,
        ],
        queue=True,
    )
    # --- Practical task selected: repopulate the subtask dropdown.
    practical_tasks.change(
        event_handler_practical_tasks,
        [practical_tasks, practical_subtasks_selected],
        [practical_subtasks],
        queue=True,
    )
    # --- Subtask selected: toggle visibility of the matching settings widgets.
    practical_subtasks.change(
        event_handler_practical_subtasks,
        [practical_tasks, practical_subtasks, practical_subtasks_selected],
        [
            practical_subtasks_selected,
            settings_practical_tasks,
            dropdown_mbti,
            threshold_mbti,
            threshold_professional_skills,
            dropdown_professional_skills,
            target_score_ope,
            target_score_con,
            target_score_ext,
            target_score_agr,
            target_score_nneu,
            equal_coefficient,
            number_priority,
            number_importance_traits,
            threshold_consumer_preferences,
            dropdown_candidates,
            number_openness,
            number_conscientiousness,
            number_extraversion,
            number_agreeableness,
            number_non_neuroticism,
        ],
        queue=True,
    )
    # --- Candidate profession selected: update the Big-Five weight inputs.
    dropdown_candidates.change(
        fn=event_handler_dropdown_candidates,
        inputs=[practical_subtasks, dropdown_candidates],
        outputs=[
            number_openness,
            number_conscientiousness,
            number_extraversion,
            number_agreeableness,
            number_non_neuroticism,
        ],
        queue=True,
    )
    # --- "Calculate practical task" button: ranks candidates and shows results.
    calculate_practical_task.click(
        fn=event_handler_calculate_practical_task_blocks,
        inputs=[
            files,
            practical_subtasks,
            pt_scores,
            dropdown_mbti,
            threshold_mbti,
            threshold_professional_skills,
            dropdown_professional_skills,
            target_score_ope,
            target_score_con,
            target_score_ext,
            target_score_agr,
            target_score_nneu,
            equal_coefficient,
            number_priority,
            number_importance_traits,
            threshold_consumer_preferences,
            number_openness,
            number_conscientiousness,
            number_extraversion,
            number_agreeableness,
            number_non_neuroticism,
        ],
        outputs=[
            sorted_videos,
            sorted_videos_column,
            practical_task_sorted,
            csv_practical_task_sorted,
            mbti_accordion,
            mbti_description,
            mbti_description_data,
            video_sorted_column,
            video_sorted,
            metadata,
            metadata_1,
            name_row,
            name_logo,
            name,
            surname_row,
            surname_logo,
            surname,
            metadata_2,
            email_row,
            email_logo,
            email,
            phone_row,
            phone_logo,
            phone,
            in_development,
        ],
        queue=True,
    )
    # --- A row in the ranked results table was clicked: show that
    #     candidate's video and metadata.
    practical_task_sorted.select(
        event_handler_practical_task_sorted,
        [files, practical_task_sorted],
        [
            video_sorted_column,
            video_sorted,
            metadata,
            metadata_1,
            name_row,
            name_logo,
            name,
            surname_row,
            surname_logo,
            surname,
            metadata_2,
            email_row,
            email_logo,
            email,
            phone_row,
            phone_logo,
            phone,
        ],
        queue=True,
    )
|
app/event_handlers/examples_blocks.py
ADDED
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: examples_blocks.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: Event handler for the addition of examples to the Gradio app.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
import re
|
9 |
+
from pathlib import Path
|
10 |
+
|
11 |
+
# Importing necessary components for the Gradio app
|
12 |
+
|
13 |
+
# import hashlib
|
14 |
+
# import time
|
15 |
+
|
16 |
+
# for _ in range(6):
|
17 |
+
# current_time = time.time()
|
18 |
+
# time_bytes = str(current_time).encode("utf-8")
|
19 |
+
# hash_object = hashlib.sha256(time_bytes)
|
20 |
+
# hex_digest = hash_object.hexdigest()
|
21 |
+
# print(hex_digest[:15])
|
22 |
+
|
23 |
+
|
24 |
+
def event_handler_examples_blocks(videos_dir="videos"):
    """Return example video paths sorted by the number in their file name.

    Args:
        videos_dir: Directory scanned for ``*.mp4`` example files.
            Defaults to ``"videos"`` (the repository layout), so existing
            callers are unaffected.

    Returns:
        list[str]: Paths to the ``.mp4`` files, ordered by the first run
        of digits in each file stem (``video2`` before ``video10``).
        Files whose stem contains no digits sort first instead of raising
        ``AttributeError`` as the previous implementation did.
    """

    def _stem_number(path):
        # Key on the first digit run in the stem; -1 keeps digit-less
        # names from crashing the sort (re.search returns None for them).
        match = re.search(r"\d+", Path(path).stem)
        return int(match.group()) if match else -1

    return sorted(
        (str(p) for p in Path(videos_dir).glob("*.mp4")),
        key=_stem_number,
    )
|
app/event_handlers/files.py
ADDED
@@ -0,0 +1,97 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: files.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: Event handlers for the uploaded video files list in the Gradio app.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
import gradio as gr
|
9 |
+
from pathlib import Path
|
10 |
+
|
11 |
+
# Importing necessary components for the Gradio app
|
12 |
+
from app.config import config_data
|
13 |
+
from app.components import html_message, video_create_ui, button
|
14 |
+
from app.utils import get_language_settings
|
15 |
+
|
16 |
+
|
17 |
+
def event_handler_files(language, files, video, pt_scores):
    """Refresh the video player and action buttons when the upload list changes.

    Args:
        language: Currently selected interface language value.
        files: Uploaded video file paths (possibly empty).
        video: Path currently shown in the video player.
        pt_scores: Scores dataframe; ``shape[1] >= 7`` means personality
            scores have already been computed.

    Returns:
        tuple: (notification HTML, video player, "calculate" button,
        "clear" button) rebuilt for the current state.
    """
    lang_id, _ = get_language_settings(language)

    def _action_button(text, enabled, scale, icon, elem_id):
        # Both action buttons share the same construction pattern;
        # only text, enabled flag, scale, icon and element id vary.
        return button(text, enabled, scale, icon, True, elem_id)

    if not files:
        # Nothing uploaded: "no videos" notice, empty player, buttons disabled.
        return (
            html_message(config_data.InformationMessages_NOTI_VIDEOS[lang_id], False),
            video_create_ui(label=config_data.OtherMessages_VIDEO_PLAYER[lang_id]),
            _action_button(
                config_data.OtherMessages_CALCULATE_PT_SCORES[lang_id],
                False,
                3,
                "./images/calculate_pt_scores.ico",
                "calculate_oceanai",
            ),
            _action_button(
                config_data.OtherMessages_CLEAR_APP[lang_id],
                False,
                1,
                "./images/clear.ico",
                "clear_oceanai",
            ),
        )

    # Keep the player pointing at a file that is still in the list.
    current_video = video if video in files else files[0]

    # Hide the "ready to calculate" notice once scores already exist.
    notice_visible = pt_scores.shape[1] < 7

    return (
        html_message(
            config_data.OtherMessages_NOTI_CALCULATE[lang_id],
            True,
            notice_visible,
        ),
        video_create_ui(
            value=current_video,
            label=config_data.OtherMessages_VIDEO_PLAYER[lang_id],
            file_name=Path(current_video).name,
        ),
        _action_button(
            config_data.OtherMessages_CALCULATE_PT_SCORES[lang_id],
            True,
            3,
            "./images/calculate_pt_scores.ico",
            "calculate_oceanai",
        ),
        _action_button(
            config_data.OtherMessages_CLEAR_APP[lang_id],
            True,
            1,
            "./images/clear.ico",
            "clear_oceanai",
        ),
    )
|
73 |
+
|
74 |
+
|
75 |
+
def event_handler_files_select(language, files, evt: gr.SelectData):
    """Show the upload-list entry the user clicked in the video player.

    Args:
        language: Currently selected interface language value.
        files: Uploaded video file paths.
        evt: Gradio selection payload; ``evt.index`` is the position in
            ``files`` and ``evt.value`` the displayed file name.

    Returns:
        Updated video player component showing the chosen file.
    """
    lang_id, _ = get_language_settings(language)
    chosen = files[evt.index]

    return video_create_ui(
        value=chosen,
        label=config_data.OtherMessages_VIDEO_PLAYER[lang_id],
        file_name=evt.value,
    )
|
83 |
+
|
84 |
+
|
85 |
+
def event_handler_files_delete(language, files, video, evt: gr.DeletedFileData):
    """Keep the video player valid after a file is removed from the upload list.

    Args:
        language: Currently selected interface language value.
        files: Remaining uploaded video file paths (after the deletion).
        video: Path currently shown in the video player.
        evt: Gradio deletion payload; ``evt.file.path`` is the removed file.

    Returns:
        Updated video player component. If the deleted file was the one
        being shown, the first remaining file is displayed instead; if no
        files remain, an empty player is returned (previously this case
        raised ``IndexError`` on ``files[0]``).
    """
    # NOTE: the previous version declared ``global block_event_handler_files``
    # for a name that was never defined or used — removed as dead code.
    lang_id, _ = get_language_settings(language)
    label = config_data.OtherMessages_VIDEO_PLAYER[lang_id]

    if video == evt.file.path:
        if not files:
            # The last file was deleted: fall back to an empty player.
            return video_create_ui(label=label)
        video = files[0]

    return video_create_ui(
        value=video,
        label=label,
        file_name=Path(video).name,
    )
|
app/event_handlers/languages.py
ADDED
@@ -0,0 +1,125 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: languages.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: Selected language event handlers for Gradio app.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
import gradio as gr
|
9 |
+
from pathlib import Path
|
10 |
+
|
11 |
+
# Importing necessary components for the Gradio app
|
12 |
+
from app.description import DESCRIPTIONS
|
13 |
+
from app.description_steps import STEP_1, STEP_2
|
14 |
+
from app.config import config_data
|
15 |
+
from app.components import (
|
16 |
+
files_create_ui,
|
17 |
+
video_create_ui,
|
18 |
+
dropdown_create_ui,
|
19 |
+
button,
|
20 |
+
html_message,
|
21 |
+
dataframe,
|
22 |
+
)
|
23 |
+
from app.utils import get_language_settings
|
24 |
+
|
25 |
+
|
26 |
+
def event_handler_languages(languages, files, video, pt_scores, csv_pt_scores):
    """Rebuild language-dependent UI components after a language switch.

    Args:
        languages: Value of the language dropdown.
        files: Uploaded video file paths (possibly empty).
        video: Path currently shown in the video player, or falsy if none.
        pt_scores: Scores dataframe; ``shape[1] >= 7`` means personality
            scores have already been computed and the step-2 UI is shown.
        csv_pt_scores: Current value of the CSV-export file component.

    Returns:
        tuple: 17 relocalized components, in the exact order expected by
        the ``languages.select`` outputs list in
        ``app/event_handlers/event_handlers.py`` — do not reorder.
    """
    lang_id, choices = get_language_settings(languages)

    if not video:
        # No video loaded: empty player plus the "no videos" notice.
        video = video_create_ui(label=config_data.OtherMessages_VIDEO_PLAYER[lang_id])
        noti_videos = html_message(
            config_data.InformationMessages_NOTI_VIDEOS[lang_id], False
        )
    else:
        # Re-create the player for the current video with a localized label.
        video = video_create_ui(
            value=video,
            label=config_data.OtherMessages_VIDEO_PLAYER[lang_id],
            file_name=Path(video).name,
        )
        # Notice is hidden once scores already exist (>= 7 columns).
        noti_videos = html_message(
            config_data.OtherMessages_NOTI_CALCULATE[lang_id],
            True,
            False if pt_scores.shape[1] >= 7 else True,
        )

    # CSV export widget is only populated/visible when scores exist.
    csv_pt_scores = files_create_ui(
        csv_pt_scores if pt_scores.shape[1] >= 7 else None,
        "single",
        [".csv"],
        config_data.OtherMessages_EXPORT_PT_SCORES[lang_id],
        True,
        False,
        True if pt_scores.shape[1] >= 7 else False,
        "csv-container",
    )
    # Step-2 banner mirrors the same "scores exist" condition.
    step_2 = gr.HTML(
        value=STEP_2[lang_id], visible=True if pt_scores.shape[1] >= 7 else False
    )

    if pt_scores.shape[1] >= 7:
        # Re-render the existing scores with localized column headers.
        pt_scores = dataframe(
            headers=(config_data.Dataframes_PT_SCORES[lang_id]),
            values=pt_scores.values.tolist(),
            visible=True,
        )
    else:
        pt_scores = dataframe(visible=False)

    return (
        gr.Markdown(value=DESCRIPTIONS[lang_id]),
        gr.HTML(value=STEP_1[lang_id]),
        # Flag image for the newly selected language.
        gr.Image(
            value=config_data.StaticPaths_IMAGES + config_data.Images_LANGUAGES[lang_id]
        ),
        # Language dropdown itself, re-created with localized choices.
        dropdown_create_ui(
            label=None,
            info=None,
            choices=choices,
            value=choices[lang_id],
            visible=True,
            show_label=False,
            elem_classes="dropdown-language-container",
        ),
        gr.Tab(config_data.Labels_APP_LABEL[lang_id]),
        gr.Tab(config_data.Labels_ABOUT_APP_LABEL[lang_id]),
        gr.Tab(config_data.Labels_ABOUT_AUTHORS_LABEL[lang_id]),
        gr.Tab(config_data.Labels_REQUIREMENTS_LABEL[lang_id]),
        # Upload widget with a localized label listing supported extensions.
        files_create_ui(
            value=files,
            label="{} ({})".format(
                config_data.OtherMessages_VIDEO_FILES[lang_id],
                ", ".join(config_data.Settings_SUPPORTED_VIDEO_EXT),
            ),
            file_types=[f".{ext}" for ext in config_data.Settings_SUPPORTED_VIDEO_EXT],
        ),
        video,
        button(
            config_data.OtherMessages_EXAMPLES_APP[lang_id],
            True,
            1,
            "./images/examples.ico",
            True,
            "examples_oceanai",
        ),
        # Calculate/clear buttons stay enabled only when files are uploaded.
        button(
            config_data.OtherMessages_CALCULATE_PT_SCORES[lang_id],
            True if files else False,
            3,
            "./images/calculate_pt_scores.ico",
            True,
            "calculate_oceanai",
        ),
        button(
            config_data.OtherMessages_CLEAR_APP[lang_id],
            True if files else False,
            1,
            "./images/clear.ico",
            True,
            "clear_oceanai",
        ),
        noti_videos,
        pt_scores,
        csv_pt_scores,
        step_2,
    )
|
app/event_handlers/practical_subtasks.py
ADDED
@@ -0,0 +1,420 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: practical_subtasks.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: Event handler for Gradio app to filter practical subtasks based on selected practical subtasks.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
import gradio as gr
|
9 |
+
|
10 |
+
# Importing necessary components for the Gradio app
|
11 |
+
from app.config import config_data
|
12 |
+
from app.utils import read_csv_file, extract_profession_weights
|
13 |
+
from app.components import number_create_ui, dropdown_create_ui
|
14 |
+
|
15 |
+
|
16 |
+
def event_handler_practical_subtasks(
|
17 |
+
practical_tasks, practical_subtasks, practical_subtasks_selected
|
18 |
+
):
|
19 |
+
practical_subtasks_selected[practical_tasks] = practical_subtasks
|
20 |
+
|
21 |
+
if practical_subtasks.lower() == "16 personality types of mbti":
|
22 |
+
return (
|
23 |
+
practical_subtasks_selected,
|
24 |
+
gr.Column(visible=True),
|
25 |
+
dropdown_create_ui(
|
26 |
+
label=f"Potential candidates by Personality Type of MBTI ({len(config_data.Settings_DROPDOWN_MBTI)})",
|
27 |
+
info=config_data.InformationMessages_DROPDOWN_MBTI_INFO,
|
28 |
+
choices=config_data.Settings_DROPDOWN_MBTI,
|
29 |
+
value=config_data.Settings_DROPDOWN_MBTI[0],
|
30 |
+
visible=True,
|
31 |
+
elem_classes="dropdown-container",
|
32 |
+
),
|
33 |
+
number_create_ui(
|
34 |
+
value=0.5,
|
35 |
+
minimum=0.0,
|
36 |
+
maximum=1.0,
|
37 |
+
step=0.01,
|
38 |
+
label=config_data.Labels_THRESHOLD_MBTI_LABEL,
|
39 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(0, 1.0),
|
40 |
+
show_label=True,
|
41 |
+
interactive=True,
|
42 |
+
visible=True,
|
43 |
+
render=True,
|
44 |
+
elem_classes="number-container",
|
45 |
+
),
|
46 |
+
number_create_ui(visible=False),
|
47 |
+
dropdown_create_ui(visible=False),
|
48 |
+
number_create_ui(visible=False),
|
49 |
+
number_create_ui(visible=False),
|
50 |
+
number_create_ui(visible=False),
|
51 |
+
number_create_ui(visible=False),
|
52 |
+
number_create_ui(visible=False),
|
53 |
+
number_create_ui(visible=False),
|
54 |
+
number_create_ui(visible=False),
|
55 |
+
number_create_ui(visible=False),
|
56 |
+
number_create_ui(visible=False),
|
57 |
+
dropdown_create_ui(visible=False),
|
58 |
+
number_create_ui(visible=False),
|
59 |
+
number_create_ui(visible=False),
|
60 |
+
number_create_ui(visible=False),
|
61 |
+
number_create_ui(visible=False),
|
62 |
+
number_create_ui(visible=False),
|
63 |
+
)
|
64 |
+
elif practical_subtasks.lower() == "professional groups":
|
65 |
+
df_traits_priority_for_professions = read_csv_file(
|
66 |
+
config_data.Links_PROFESSIONS
|
67 |
+
)
|
68 |
+
weights_professions, interactive_professions = extract_profession_weights(
|
69 |
+
df_traits_priority_for_professions,
|
70 |
+
config_data.Settings_DROPDOWN_CANDIDATES[0],
|
71 |
+
)
|
72 |
+
|
73 |
+
return (
|
74 |
+
practical_subtasks_selected,
|
75 |
+
gr.Column(visible=True),
|
76 |
+
dropdown_create_ui(visible=False),
|
77 |
+
number_create_ui(visible=False),
|
78 |
+
number_create_ui(visible=False),
|
79 |
+
dropdown_create_ui(visible=False),
|
80 |
+
number_create_ui(visible=False),
|
81 |
+
number_create_ui(visible=False),
|
82 |
+
number_create_ui(visible=False),
|
83 |
+
number_create_ui(visible=False),
|
84 |
+
number_create_ui(visible=False),
|
85 |
+
number_create_ui(visible=False),
|
86 |
+
number_create_ui(visible=False),
|
87 |
+
number_create_ui(visible=False),
|
88 |
+
number_create_ui(visible=False),
|
89 |
+
dropdown_create_ui(
|
90 |
+
label=f"Potential candidates by professional responsibilities ({len(config_data.Settings_DROPDOWN_CANDIDATES)})",
|
91 |
+
info=config_data.InformationMessages_DROPDOWN_CANDIDATES_INFO,
|
92 |
+
choices=config_data.Settings_DROPDOWN_CANDIDATES,
|
93 |
+
value=config_data.Settings_DROPDOWN_CANDIDATES[0],
|
94 |
+
visible=True,
|
95 |
+
elem_classes="dropdown-container",
|
96 |
+
),
|
97 |
+
number_create_ui(
|
98 |
+
value=weights_professions[0],
|
99 |
+
minimum=config_data.Values_0_100[0],
|
100 |
+
maximum=config_data.Values_0_100[1],
|
101 |
+
step=1,
|
102 |
+
label=config_data.Labels_NUMBER_IMPORTANCE_OPE_LABEL,
|
103 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(
|
104 |
+
config_data.Values_0_100[0], config_data.Values_0_100[1]
|
105 |
+
),
|
106 |
+
show_label=True,
|
107 |
+
interactive=interactive_professions,
|
108 |
+
visible=True,
|
109 |
+
render=True,
|
110 |
+
elem_classes="number-container",
|
111 |
+
),
|
112 |
+
number_create_ui(
|
113 |
+
value=weights_professions[1],
|
114 |
+
minimum=config_data.Values_0_100[0],
|
115 |
+
maximum=config_data.Values_0_100[1],
|
116 |
+
step=1,
|
117 |
+
label=config_data.Labels_NUMBER_IMPORTANCE_CON_LABEL,
|
118 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(
|
119 |
+
config_data.Values_0_100[0], config_data.Values_0_100[1]
|
120 |
+
),
|
121 |
+
show_label=True,
|
122 |
+
interactive=interactive_professions,
|
123 |
+
visible=True,
|
124 |
+
render=True,
|
125 |
+
elem_classes="number-container",
|
126 |
+
),
|
127 |
+
number_create_ui(
|
128 |
+
value=weights_professions[2],
|
129 |
+
minimum=config_data.Values_0_100[0],
|
130 |
+
maximum=config_data.Values_0_100[1],
|
131 |
+
step=1,
|
132 |
+
label=config_data.Labels_NUMBER_IMPORTANCE_EXT_LABEL,
|
133 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(
|
134 |
+
config_data.Values_0_100[0], config_data.Values_0_100[1]
|
135 |
+
),
|
136 |
+
show_label=True,
|
137 |
+
interactive=interactive_professions,
|
138 |
+
visible=True,
|
139 |
+
render=True,
|
140 |
+
elem_classes="number-container",
|
141 |
+
),
|
142 |
+
number_create_ui(
|
143 |
+
value=weights_professions[3],
|
144 |
+
minimum=config_data.Values_0_100[0],
|
145 |
+
maximum=config_data.Values_0_100[1],
|
146 |
+
step=1,
|
147 |
+
label=config_data.Labels_NUMBER_IMPORTANCE_AGR_LABEL,
|
148 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(
|
149 |
+
config_data.Values_0_100[0], config_data.Values_0_100[1]
|
150 |
+
),
|
151 |
+
show_label=True,
|
152 |
+
interactive=interactive_professions,
|
153 |
+
visible=True,
|
154 |
+
render=True,
|
155 |
+
elem_classes="number-container",
|
156 |
+
),
|
157 |
+
number_create_ui(
|
158 |
+
value=weights_professions[4],
|
159 |
+
minimum=config_data.Values_0_100[0],
|
160 |
+
maximum=config_data.Values_0_100[1],
|
161 |
+
step=1,
|
162 |
+
label=config_data.Labels_NUMBER_IMPORTANCE_NNEU_LABEL,
|
163 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(
|
164 |
+
config_data.Values_0_100[0], config_data.Values_0_100[1]
|
165 |
+
),
|
166 |
+
show_label=True,
|
167 |
+
interactive=interactive_professions,
|
168 |
+
visible=True,
|
169 |
+
render=True,
|
170 |
+
elem_classes="number-container",
|
171 |
+
),
|
172 |
+
)
|
173 |
+
elif practical_subtasks.lower() == "professional skills":
|
174 |
+
return (
|
175 |
+
practical_subtasks_selected,
|
176 |
+
gr.Column(visible=True),
|
177 |
+
dropdown_create_ui(visible=False),
|
178 |
+
number_create_ui(visible=False),
|
179 |
+
number_create_ui(
|
180 |
+
value=0.45,
|
181 |
+
minimum=0.0,
|
182 |
+
maximum=1.0,
|
183 |
+
step=0.01,
|
184 |
+
label=config_data.Labels_THRESHOLD_PROFESSIONAL_SKILLS_LABEL,
|
185 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(0, 1.0),
|
186 |
+
show_label=True,
|
187 |
+
interactive=True,
|
188 |
+
visible=True,
|
189 |
+
render=True,
|
190 |
+
elem_classes="number-container",
|
191 |
+
),
|
192 |
+
dropdown_create_ui(
|
193 |
+
label=f"Professional skills ({len(config_data.Settings_DROPDOWN_PROFESSIONAL_SKILLS)})",
|
194 |
+
info=config_data.InformationMessages_DROPDOWN_PROFESSIONAL_SKILLS_INFO,
|
195 |
+
choices=config_data.Settings_DROPDOWN_PROFESSIONAL_SKILLS,
|
196 |
+
value=config_data.Settings_DROPDOWN_PROFESSIONAL_SKILLS[0],
|
197 |
+
visible=True,
|
198 |
+
elem_classes="dropdown-container",
|
199 |
+
),
|
200 |
+
number_create_ui(visible=False),
|
201 |
+
number_create_ui(visible=False),
|
202 |
+
number_create_ui(visible=False),
|
203 |
+
number_create_ui(visible=False),
|
204 |
+
number_create_ui(visible=False),
|
205 |
+
number_create_ui(visible=False),
|
206 |
+
number_create_ui(visible=False),
|
207 |
+
number_create_ui(visible=False),
|
208 |
+
number_create_ui(visible=False),
|
209 |
+
dropdown_create_ui(visible=False),
|
210 |
+
number_create_ui(visible=False),
|
211 |
+
number_create_ui(visible=False),
|
212 |
+
number_create_ui(visible=False),
|
213 |
+
number_create_ui(visible=False),
|
214 |
+
number_create_ui(visible=False),
|
215 |
+
)
|
216 |
+
elif (
|
217 |
+
practical_subtasks.lower() == "finding a suitable junior colleague"
|
218 |
+
or practical_subtasks.lower() == "finding a suitable senior colleague"
|
219 |
+
):
|
220 |
+
return (
|
221 |
+
practical_subtasks_selected,
|
222 |
+
gr.Column(visible=True),
|
223 |
+
dropdown_create_ui(visible=False),
|
224 |
+
number_create_ui(visible=False),
|
225 |
+
number_create_ui(visible=False),
|
226 |
+
dropdown_create_ui(visible=False),
|
227 |
+
number_create_ui(
|
228 |
+
value=config_data.Values_TARGET_SCORES[0],
|
229 |
+
minimum=0.0,
|
230 |
+
maximum=1.0,
|
231 |
+
step=0.000001,
|
232 |
+
label=config_data.Labels_TARGET_SCORE_OPE_LABEL,
|
233 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(0, 1.0),
|
234 |
+
show_label=True,
|
235 |
+
interactive=True,
|
236 |
+
visible=True,
|
237 |
+
render=True,
|
238 |
+
elem_classes="number-container",
|
239 |
+
),
|
240 |
+
number_create_ui(
|
241 |
+
value=config_data.Values_TARGET_SCORES[1],
|
242 |
+
minimum=0.0,
|
243 |
+
maximum=1.0,
|
244 |
+
step=0.000001,
|
245 |
+
label=config_data.Labels_TARGET_SCORE_CON_LABEL,
|
246 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(0, 1.0),
|
247 |
+
show_label=True,
|
248 |
+
interactive=True,
|
249 |
+
visible=True,
|
250 |
+
render=True,
|
251 |
+
elem_classes="number-container",
|
252 |
+
),
|
253 |
+
number_create_ui(
|
254 |
+
value=config_data.Values_TARGET_SCORES[2],
|
255 |
+
minimum=0.0,
|
256 |
+
maximum=1.0,
|
257 |
+
step=0.000001,
|
258 |
+
label=config_data.Labels_TARGET_SCORE_EXT_LABEL,
|
259 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(0, 1.0),
|
260 |
+
show_label=True,
|
261 |
+
interactive=True,
|
262 |
+
visible=True,
|
263 |
+
render=True,
|
264 |
+
elem_classes="number-container",
|
265 |
+
),
|
266 |
+
number_create_ui(
|
267 |
+
value=config_data.Values_TARGET_SCORES[3],
|
268 |
+
minimum=0.0,
|
269 |
+
maximum=1.0,
|
270 |
+
step=0.000001,
|
271 |
+
label=config_data.Labels_TARGET_SCORE_AGR_LABEL,
|
272 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(0, 1.0),
|
273 |
+
show_label=True,
|
274 |
+
interactive=True,
|
275 |
+
visible=True,
|
276 |
+
render=True,
|
277 |
+
elem_classes="number-container",
|
278 |
+
),
|
279 |
+
number_create_ui(
|
280 |
+
value=config_data.Values_TARGET_SCORES[4],
|
281 |
+
minimum=0.0,
|
282 |
+
maximum=1.0,
|
283 |
+
step=0.000001,
|
284 |
+
label=config_data.Labels_TARGET_SCORE_NNEU_LABEL,
|
285 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(0, 1.0),
|
286 |
+
show_label=True,
|
287 |
+
interactive=True,
|
288 |
+
visible=True,
|
289 |
+
render=True,
|
290 |
+
elem_classes="number-container",
|
291 |
+
),
|
292 |
+
number_create_ui(
|
293 |
+
value=0.5,
|
294 |
+
minimum=0.0,
|
295 |
+
maximum=1.0,
|
296 |
+
step=0.01,
|
297 |
+
label=config_data.Labels_EQUAL_COEFFICIENT_LABEL,
|
298 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(0, 1.0),
|
299 |
+
show_label=True,
|
300 |
+
interactive=True,
|
301 |
+
visible=True,
|
302 |
+
render=True,
|
303 |
+
elem_classes="number-container",
|
304 |
+
),
|
305 |
+
number_create_ui(visible=False),
|
306 |
+
number_create_ui(visible=False),
|
307 |
+
number_create_ui(visible=False),
|
308 |
+
dropdown_create_ui(visible=False),
|
309 |
+
number_create_ui(visible=False),
|
310 |
+
number_create_ui(visible=False),
|
311 |
+
number_create_ui(visible=False),
|
312 |
+
number_create_ui(visible=False),
|
313 |
+
number_create_ui(visible=False),
|
314 |
+
)
|
315 |
+
elif (
|
316 |
+
practical_subtasks.lower() == "car characteristics"
|
317 |
+
or practical_subtasks.lower() == "mobile device application categories"
|
318 |
+
or practical_subtasks.lower() == "clothing style correlation"
|
319 |
+
):
|
320 |
+
if practical_subtasks.lower() == "car characteristics":
|
321 |
+
|
322 |
+
df_correlation_coefficients = read_csv_file(
|
323 |
+
config_data.Links_CAR_CHARACTERISTICS,
|
324 |
+
["Trait", "Style and performance", "Safety and practicality"],
|
325 |
+
)
|
326 |
+
|
327 |
+
elif practical_subtasks.lower() == "mobile device application categories":
|
328 |
+
|
329 |
+
df_correlation_coefficients = read_csv_file(
|
330 |
+
config_data.Links_MDA_CATEGORIES
|
331 |
+
)
|
332 |
+
|
333 |
+
elif practical_subtasks.lower() == "clothing style correlation":
|
334 |
+
df_correlation_coefficients = read_csv_file(config_data.Links_CLOTHING_SC)
|
335 |
+
|
336 |
+
return (
|
337 |
+
practical_subtasks_selected,
|
338 |
+
gr.Column(visible=True),
|
339 |
+
dropdown_create_ui(visible=False),
|
340 |
+
number_create_ui(visible=False),
|
341 |
+
number_create_ui(visible=False),
|
342 |
+
dropdown_create_ui(visible=False),
|
343 |
+
number_create_ui(visible=False),
|
344 |
+
number_create_ui(visible=False),
|
345 |
+
number_create_ui(visible=False),
|
346 |
+
number_create_ui(visible=False),
|
347 |
+
number_create_ui(visible=False),
|
348 |
+
number_create_ui(visible=False),
|
349 |
+
number_create_ui(
|
350 |
+
value=1,
|
351 |
+
minimum=1,
|
352 |
+
maximum=df_correlation_coefficients.columns.size,
|
353 |
+
step=1,
|
354 |
+
label=config_data.Labels_NUMBER_PRIORITY_LABEL,
|
355 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(
|
356 |
+
1, df_correlation_coefficients.columns.size
|
357 |
+
),
|
358 |
+
show_label=True,
|
359 |
+
interactive=True,
|
360 |
+
visible=True,
|
361 |
+
render=True,
|
362 |
+
elem_classes="number-container",
|
363 |
+
),
|
364 |
+
number_create_ui(
|
365 |
+
value=1,
|
366 |
+
minimum=1,
|
367 |
+
maximum=5,
|
368 |
+
step=1,
|
369 |
+
label=config_data.Labels_NUMBER_IMPORTANCE_TRAITS_LABEL,
|
370 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(1, 5),
|
371 |
+
show_label=True,
|
372 |
+
interactive=True,
|
373 |
+
visible=True,
|
374 |
+
render=True,
|
375 |
+
elem_classes="number-container",
|
376 |
+
),
|
377 |
+
number_create_ui(
|
378 |
+
value=0.55,
|
379 |
+
minimum=0.0,
|
380 |
+
maximum=1.0,
|
381 |
+
step=0.01,
|
382 |
+
label=config_data.Labels_THRESHOLD_CONSUMER_PREFERENCES_LABEL,
|
383 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(0, 1.0),
|
384 |
+
show_label=True,
|
385 |
+
interactive=True,
|
386 |
+
visible=True,
|
387 |
+
render=True,
|
388 |
+
elem_classes="number-container",
|
389 |
+
),
|
390 |
+
dropdown_create_ui(visible=False),
|
391 |
+
number_create_ui(visible=False),
|
392 |
+
number_create_ui(visible=False),
|
393 |
+
number_create_ui(visible=False),
|
394 |
+
number_create_ui(visible=False),
|
395 |
+
number_create_ui(visible=False),
|
396 |
+
)
|
397 |
+
else:
|
398 |
+
return (
|
399 |
+
practical_subtasks_selected,
|
400 |
+
gr.Column(visible=False),
|
401 |
+
dropdown_create_ui(visible=False),
|
402 |
+
number_create_ui(visible=False),
|
403 |
+
number_create_ui(visible=False),
|
404 |
+
dropdown_create_ui(visible=False),
|
405 |
+
number_create_ui(visible=False),
|
406 |
+
number_create_ui(visible=False),
|
407 |
+
number_create_ui(visible=False),
|
408 |
+
number_create_ui(visible=False),
|
409 |
+
number_create_ui(visible=False),
|
410 |
+
number_create_ui(visible=False),
|
411 |
+
number_create_ui(visible=False),
|
412 |
+
number_create_ui(visible=False),
|
413 |
+
number_create_ui(visible=False),
|
414 |
+
dropdown_create_ui(visible=False),
|
415 |
+
number_create_ui(visible=False),
|
416 |
+
number_create_ui(visible=False),
|
417 |
+
number_create_ui(visible=False),
|
418 |
+
number_create_ui(visible=False),
|
419 |
+
number_create_ui(visible=False),
|
420 |
+
)
|
app/event_handlers/practical_task_sorted.py
ADDED
@@ -0,0 +1,137 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: practical_task_sorted.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: Event handler for the practical task sorted to the Gradio app.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
import gradio as gr
|
9 |
+
from pathlib import Path
|
10 |
+
|
11 |
+
# Importing necessary components for the Gradio app
|
12 |
+
from app.config import config_data
|
13 |
+
from app.video_metadata import video_metadata
|
14 |
+
from app.components import video_create_ui, textbox_create_ui
|
15 |
+
|
16 |
+
|
17 |
+
def event_handler_practical_task_sorted(
    files, practical_task_sorted, evt_data: gr.SelectData
):
    """Show the selected candidate's video and, when available, their metadata.

    Triggered when a row of the sorted practical-task dataframe is selected.
    Returns a tuple of Gradio component updates: the video column and player
    first, followed by the candidate-metadata components (shown only when the
    selected video has an entry in ``video_metadata``).
    """
    # Map the selected dataframe row back to a zero-based index into `files`.
    selected_row = evt_data.index[0]
    person_id = (
        int(
            practical_task_sorted.iloc[selected_row][
                config_data.Dataframes_PT_SCORES[0][0]
            ]
        )
        - 1
    )

    # The top row of the sorted table is the best-ranked candidate.
    prefix = "Best" if selected_row == 0 else ""
    label = prefix + " " + config_data.Dataframes_PT_SCORES[0][0]

    video_name = Path(files[person_id]).name

    if video_name in video_metadata:
        meta = video_metadata[video_name]

        def _meta_textbox(value, kind, field_label):
            # Read-only, non-interactive textbox showing one metadata field.
            return textbox_create_ui(
                value,
                kind,
                field_label,
                None,
                None,
                1,
                True,
                False,
                True,
                False,
                1,
                False,
            )

        person_metadata = (
            gr.Column(visible=True),
            gr.Row(visible=True),
            gr.Row(visible=True),
            gr.Image(visible=True),
            _meta_textbox(meta[0], "text", "First name"),
            gr.Row(visible=True),
            gr.Image(visible=True),
            _meta_textbox(meta[1], "text", "Last name"),
            gr.Row(visible=True),
            gr.Row(visible=True),
            gr.Image(visible=True),
            _meta_textbox(meta[2], "email", "Email"),
            gr.Row(visible=True),
            gr.Image(visible=True),
            _meta_textbox(meta[3], "text", "Phone number"),
        )
    else:
        # No metadata known for this video: hide every metadata component.
        person_metadata = (
            gr.Column(visible=False),
            gr.Row(visible=False),
            gr.Row(visible=False),
            gr.Image(visible=False),
            textbox_create_ui(visible=False),
            gr.Row(visible=False),
            gr.Image(visible=False),
            textbox_create_ui(visible=False),
            gr.Row(visible=False),
            gr.Row(visible=False),
            gr.Image(visible=False),
            textbox_create_ui(visible=False),
            gr.Row(visible=False),
            gr.Image(visible=False),
            textbox_create_ui(visible=False),
        )

    video_components = (
        gr.Column(visible=True),
        video_create_ui(
            value=files[person_id],
            file_name=video_name,
            label=f"{label} - " + str(person_id + 1),
            visible=True,
            elem_classes="video-sorted-container",
        ),
    )

    return video_components + person_metadata
|
app/event_handlers/practical_tasks.py
ADDED
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: practical_tasks.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: Event handler for Gradio app to filter practical tasks based on selected practical tasks.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
# Importing necessary components for the Gradio app
|
9 |
+
from app.config import config_data
|
10 |
+
from app.practical_tasks import supported_practical_tasks
|
11 |
+
from app.components import radio_create_ui
|
12 |
+
|
13 |
+
|
14 |
+
def event_handler_practical_tasks(practical_tasks, practical_subtasks_selected):
    """Rebuild the subtask radio group for the newly selected practical task.

    Args:
        practical_tasks: Name of the practical task chosen in the task radio.
        practical_subtasks_selected: Mapping of task name -> currently
            selected subtask for that task.

    Returns:
        A refreshed subtask radio component whose choices come from
        ``supported_practical_tasks`` and whose value is the task's
        previously selected subtask.
    """
    current_subtask = practical_subtasks_selected[practical_tasks]
    available_subtasks = supported_practical_tasks[practical_tasks]

    return radio_create_ui(
        current_subtask,
        config_data.Labels_PRACTICAL_SUBTASKS_LABEL,
        available_subtasks,
        config_data.InformationMessages_PRACTICAL_SUBTASKS_INFO,
        True,
        True,
    )
|
app/mbti_description.py
ADDED
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: mbti_description.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: Personality dimensions description.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
import pandas as pd
|
9 |
+
|
10 |
+
# Importing necessary components for the Gradio app
|
11 |
+
|
12 |
+
# (description, dimension) rows for the four MBTI personality dimensions.
_MBTI_ROWS = [
    (
        "How we interact with the world and where we direct our energy",
        "(E) Extraversion - Introversion (I)",
    ),
    (
        "The kind of information we naturally notice",
        "(S) Sensing - Intuition (N)",
    ),
    (
        "How we make decisions",
        "(T) Thinking - Feeling (F)",
    ),
    (
        "Whether we prefer to live in a more structured way (making decisions) or in a more spontaneous way (taking in information)",
        "(J) Judging - Perceiving (P)",
    ),
]

# Table rendered in the app; column order is part of the display contract.
MBTI_DATA = pd.DataFrame(
    _MBTI_ROWS, columns=["Dimension description", "Dimension"]
)

# Heading shown above the MBTI table.
MBTI_DESCRIPTION = (
    "<h4>Personality types of MBTI are based on four Personality Dimensions</h4>"
)
|
app/oceanai_init.py
ADDED
@@ -0,0 +1,76 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: oceanai_init.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: OceanAI initialization.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
from oceanai.modules.lab.build import Run
|
9 |
+
|
10 |
+
|
11 |
+
def oceanai_initialization():
    """Build and return a fully initialized OceanAI ``Run`` pipeline.

    Loads the audio, video, and text models plus the multimodal-fusion
    model, then downloads and applies their pretrained weights. The
    load-model-then-load-weights order follows the OceanAI API; do not
    reorder the calls.
    """
    out = False  # passed as out=/metadata= to suppress OceanAI console output

    # Create the pipeline instance (English-language mode)
    _b5 = Run(lang="en", metadata=out)

    # Core settings
    _b5.path_to_save_ = "./models"  # directory where downloaded weight files are stored
    _b5.chunk_size_ = 2000000  # bytes fetched from the network per download step

    corpus = "fi"  # weight corpus key; NOTE(review): presumably "First Impressions" — confirm
    disk = "googledisk"  # hosting key used to pick the download mirror

    # Build the audio models
    _ = _b5.load_audio_model_hc(out=out)
    _ = _b5.load_audio_model_nn(out=out)

    # Download and apply the audio model weights
    url = _b5.weights_for_big5_["audio"][corpus]["hc"][disk]
    _ = _b5.load_audio_model_weights_hc(url=url, out=out)

    url = _b5.weights_for_big5_["audio"][corpus]["nn"][disk]
    _ = _b5.load_audio_model_weights_nn(url=url, out=out)

    # Build the video models
    _ = _b5.load_video_model_hc(lang="en", out=out)
    _ = _b5.load_video_model_deep_fe(out=out)
    _ = _b5.load_video_model_nn(out=out)

    # Download and apply the video model weights
    url = _b5.weights_for_big5_["video"][corpus]["hc"][disk]
    _ = _b5.load_video_model_weights_hc(url=url, out=out)

    url = _b5.weights_for_big5_["video"][corpus]["fe"][disk]
    _ = _b5.load_video_model_weights_deep_fe(url=url, out=out)

    url = _b5.weights_for_big5_["video"][corpus]["nn"][disk]
    _ = _b5.load_video_model_weights_nn(url=url, out=out)

    # Load the dictionary of expert features (text modality)
    _ = _b5.load_text_features(out=out)

    # Build the text models
    _ = _b5.setup_translation_model()  # needed for Russian-language input only
    _ = _b5.setup_bert_encoder(force_reload=False, out=out)
    _ = _b5.load_text_model_hc(corpus=corpus, out=out)
    _ = _b5.load_text_model_nn(corpus=corpus, out=out)

    # Download and apply the text model weights
    url = _b5.weights_for_big5_["text"][corpus]["hc"][disk]
    _ = _b5.load_text_model_weights_hc(url=url, out=out)

    url = _b5.weights_for_big5_["text"][corpus]["nn"][disk]
    _ = _b5.load_text_model_weights_nn(url=url, out=out)

    # Build the model for multimodal information fusion
    _ = _b5.load_avt_model_b5(out=out)

    # Download and apply the multimodal-fusion model weights
    url = _b5.weights_for_big5_["avt"][corpus]["b5"][disk]
    _ = _b5.load_avt_model_weights_b5(url=url, out=out)

    return _b5


# Module-level singleton: models are loaded once at import time.
b5 = oceanai_initialization()
|
app/practical_tasks.py
ADDED
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: practical_tasks.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: Utility functions for working with practical tasks data.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
import yaml
|
9 |
+
from typing import Dict, List
|
10 |
+
|
11 |
+
# Importing necessary components for the Gradio app
|
12 |
+
|
13 |
+
|
14 |
+
def load_practical_tasks_data(file_path: str) -> List:
    """Load the practical-tasks catalogue from a YAML file.

    Args:
        file_path: Path to the YAML file describing the practical tasks.

    Returns:
        The parsed list of task records; an empty list when the file is
        empty (``yaml.safe_load`` returns ``None`` for empty documents).
    """
    # Explicit encoding so the file decodes identically on every platform
    # (the default encoding is locale-dependent).
    with open(file_path, "r", encoding="utf-8") as file:
        return yaml.safe_load(file) or []
|
17 |
+
|
18 |
+
|
19 |
+
def transform_practical_tasks_data(data: List) -> Dict:
    """Index the raw task records by task name.

    Args:
        data: List of records, each with ``"task"`` and ``"subtasks"`` keys.

    Returns:
        Mapping of task name -> list of its subtasks, in input order.
    """
    tasks_by_name: Dict = {}
    for record in data:
        tasks_by_name[record["task"]] = record["subtasks"]

    return tasks_by_name
|
23 |
+
|
24 |
+
|
25 |
+
# Load the practical-task catalogue once at import time and expose it as a
# {task name: [subtask, ...]} mapping for the rest of the app.
yaml_file_path = "./practical_tasks.yaml"
practical_tasks_data = load_practical_tasks_data(yaml_file_path)
supported_practical_tasks = transform_practical_tasks_data(practical_tasks_data)
|
app/requirements_app.py
ADDED
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: requirements_app.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: Project requirements for the Gradio app.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
import pandas as pd
|
9 |
+
|
10 |
+
# Importing necessary components for the Gradio app
|
11 |
+
|
12 |
+
|
13 |
+
def read_requirements_to_df(file_path="requirements.txt"):
    """Parse a pip requirements file into a DataFrame for display.

    Args:
        file_path: Path to the requirements file (default ``requirements.txt``).

    Returns:
        ``pandas.DataFrame`` with columns "Library", "Recommended Version"
        and "Current Version" (an HTML shields.io badge linking to the
        library's PyPI page). Lines without an ``==`` pin are skipped.
    """

    def _pypi_badge(library):
        # HTML anchor + shields.io badge showing the latest PyPI release.
        return (
            f"<a href='https://pypi.org/project/{library}' target='_blank'>"
            f"<img src='https://img.shields.io/pypi/v/{library}' alt='PyPI' /></a>"
        )

    # Explicit encoding: requirements files may contain non-ASCII comments.
    with open(file_path, "r", encoding="utf-8") as file:
        lines = file.readlines()

    data = []

    for line in lines:
        line = line.strip()
        if "==" in line:
            # maxsplit=1 keeps any extra "==" in the remainder instead of
            # raising ValueError on unpacking.
            library, version = line.split("==", 1)
            data.append(
                {
                    "Library": library,
                    "Recommended Version": version,
                    "Current Version": _pypi_badge(library),
                }
            )

    return pd.DataFrame(data)
|
app/tabs.py
ADDED
@@ -0,0 +1,694 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: tabs.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: Gradio app tabs - Contains the definition of various tabs for the Gradio app interface.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
import gradio as gr
|
9 |
+
|
10 |
+
# Importing necessary components for the Gradio app
|
11 |
+
from app.description import DESCRIPTIONS
|
12 |
+
from app.description_steps import STEP_1, STEP_2
|
13 |
+
from app.mbti_description import MBTI_DESCRIPTION, MBTI_DATA
|
14 |
+
from app.app import APP
|
15 |
+
from app.authors import AUTHORS
|
16 |
+
from app.requirements_app import read_requirements_to_df
|
17 |
+
from app.config import config_data
|
18 |
+
from app.practical_tasks import supported_practical_tasks
|
19 |
+
from app.utils import read_csv_file, extract_profession_weights
|
20 |
+
from app.components import (
|
21 |
+
html_message,
|
22 |
+
files_create_ui,
|
23 |
+
video_create_ui,
|
24 |
+
button,
|
25 |
+
dataframe,
|
26 |
+
radio_create_ui,
|
27 |
+
number_create_ui,
|
28 |
+
dropdown_create_ui,
|
29 |
+
textbox_create_ui,
|
30 |
+
)
|
31 |
+
|
32 |
+
|
33 |
+
def app_tab():
|
34 |
+
description = gr.Markdown(
|
35 |
+
value=DESCRIPTIONS[config_data.AppSettings_DEFAULT_LANG_ID]
|
36 |
+
)
|
37 |
+
|
38 |
+
step_1 = gr.HTML(value=STEP_1[config_data.AppSettings_DEFAULT_LANG_ID])
|
39 |
+
|
40 |
+
with gr.Row():
|
41 |
+
files = files_create_ui(
|
42 |
+
label="{} ({})".format(
|
43 |
+
config_data.OtherMessages_VIDEO_FILES[
|
44 |
+
config_data.AppSettings_DEFAULT_LANG_ID
|
45 |
+
],
|
46 |
+
", ".join(config_data.Settings_SUPPORTED_VIDEO_EXT),
|
47 |
+
),
|
48 |
+
file_types=[f".{ext}" for ext in config_data.Settings_SUPPORTED_VIDEO_EXT],
|
49 |
+
)
|
50 |
+
|
51 |
+
video = video_create_ui()
|
52 |
+
|
53 |
+
with gr.Row():
|
54 |
+
examples = button(
|
55 |
+
config_data.OtherMessages_EXAMPLES_APP[
|
56 |
+
config_data.AppSettings_DEFAULT_LANG_ID
|
57 |
+
],
|
58 |
+
True,
|
59 |
+
1,
|
60 |
+
"./images/examples.ico",
|
61 |
+
True,
|
62 |
+
"examples_oceanai",
|
63 |
+
)
|
64 |
+
calculate_pt_scores = button(
|
65 |
+
config_data.OtherMessages_CALCULATE_PT_SCORES[
|
66 |
+
config_data.AppSettings_DEFAULT_LANG_ID
|
67 |
+
],
|
68 |
+
False,
|
69 |
+
3,
|
70 |
+
"./images/calculate_pt_scores.ico",
|
71 |
+
True,
|
72 |
+
"calculate_oceanai",
|
73 |
+
)
|
74 |
+
clear_app = button(
|
75 |
+
config_data.OtherMessages_CLEAR_APP[
|
76 |
+
config_data.AppSettings_DEFAULT_LANG_ID
|
77 |
+
],
|
78 |
+
False,
|
79 |
+
1,
|
80 |
+
"./images/clear.ico",
|
81 |
+
True,
|
82 |
+
"clear_oceanai",
|
83 |
+
)
|
84 |
+
|
85 |
+
notifications = html_message(
|
86 |
+
config_data.InformationMessages_NOTI_VIDEOS[
|
87 |
+
config_data.AppSettings_DEFAULT_LANG_ID
|
88 |
+
],
|
89 |
+
False,
|
90 |
+
)
|
91 |
+
|
92 |
+
pt_scores = dataframe(visible=False)
|
93 |
+
|
94 |
+
csv_pt_scores = files_create_ui(
|
95 |
+
None,
|
96 |
+
"single",
|
97 |
+
[".csv"],
|
98 |
+
config_data.OtherMessages_EXPORT_PT_SCORES[
|
99 |
+
config_data.AppSettings_DEFAULT_LANG_ID
|
100 |
+
],
|
101 |
+
True,
|
102 |
+
False,
|
103 |
+
False,
|
104 |
+
"csv-container",
|
105 |
+
)
|
106 |
+
|
107 |
+
step_2 = gr.HTML(
|
108 |
+
value=STEP_2[config_data.AppSettings_DEFAULT_LANG_ID], visible=False
|
109 |
+
)
|
110 |
+
|
111 |
+
first_practical_task = next(iter(supported_practical_tasks))
|
112 |
+
|
113 |
+
with gr.Column(scale=1, visible=False, render=True) as practical_tasks_column:
|
114 |
+
practical_tasks = radio_create_ui(
|
115 |
+
first_practical_task,
|
116 |
+
config_data.Labels_PRACTICAL_TASKS_LABEL,
|
117 |
+
list(map(str, supported_practical_tasks.keys())),
|
118 |
+
config_data.InformationMessages_PRACTICAL_TASKS_INFO,
|
119 |
+
True,
|
120 |
+
True,
|
121 |
+
)
|
122 |
+
|
123 |
+
practical_subtasks = radio_create_ui(
|
124 |
+
supported_practical_tasks[first_practical_task][0],
|
125 |
+
config_data.Labels_PRACTICAL_SUBTASKS_LABEL,
|
126 |
+
supported_practical_tasks[first_practical_task],
|
127 |
+
config_data.InformationMessages_PRACTICAL_SUBTASKS_INFO,
|
128 |
+
True,
|
129 |
+
True,
|
130 |
+
)
|
131 |
+
|
132 |
+
with gr.Row(
|
133 |
+
visible=False,
|
134 |
+
render=True,
|
135 |
+
variant="default",
|
136 |
+
elem_classes="settings-container",
|
137 |
+
) as settings_practical_tasks:
|
138 |
+
dropdown_mbti = dropdown_create_ui(
|
139 |
+
label=f"Potential candidates by Personality Type of MBTI ({len(config_data.Settings_DROPDOWN_MBTI)})",
|
140 |
+
info=config_data.InformationMessages_DROPDOWN_MBTI_INFO,
|
141 |
+
choices=config_data.Settings_DROPDOWN_MBTI,
|
142 |
+
value=config_data.Settings_DROPDOWN_MBTI[0],
|
143 |
+
visible=False,
|
144 |
+
elem_classes="dropdown-container",
|
145 |
+
)
|
146 |
+
|
147 |
+
threshold_mbti = number_create_ui(
|
148 |
+
value=0.5,
|
149 |
+
minimum=0.0,
|
150 |
+
maximum=1.0,
|
151 |
+
step=0.01,
|
152 |
+
label=config_data.Labels_THRESHOLD_MBTI_LABEL,
|
153 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(0, 1.0),
|
154 |
+
show_label=True,
|
155 |
+
interactive=True,
|
156 |
+
visible=False,
|
157 |
+
render=True,
|
158 |
+
elem_classes="number-container",
|
159 |
+
)
|
160 |
+
|
161 |
+
threshold_professional_skills = number_create_ui(
|
162 |
+
value=0.45,
|
163 |
+
minimum=0.0,
|
164 |
+
maximum=1.0,
|
165 |
+
step=0.01,
|
166 |
+
label=config_data.Labels_THRESHOLD_PROFESSIONAL_SKILLS_LABEL,
|
167 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(0, 1.0),
|
168 |
+
show_label=True,
|
169 |
+
interactive=True,
|
170 |
+
visible=False,
|
171 |
+
render=True,
|
172 |
+
elem_classes="number-container",
|
173 |
+
)
|
174 |
+
|
175 |
+
dropdown_professional_skills = dropdown_create_ui(
|
176 |
+
label=f"Professional skills ({len(config_data.Settings_DROPDOWN_PROFESSIONAL_SKILLS)})",
|
177 |
+
info=config_data.InformationMessages_DROPDOWN_PROFESSIONAL_SKILLS_INFO,
|
178 |
+
choices=config_data.Settings_DROPDOWN_PROFESSIONAL_SKILLS,
|
179 |
+
value=config_data.Settings_DROPDOWN_PROFESSIONAL_SKILLS[0],
|
180 |
+
visible=False,
|
181 |
+
elem_classes="dropdown-container",
|
182 |
+
)
|
183 |
+
|
184 |
+
target_score_ope = number_create_ui(
|
185 |
+
value=config_data.Values_TARGET_SCORES[0],
|
186 |
+
minimum=0.0,
|
187 |
+
maximum=1.0,
|
188 |
+
step=0.000001,
|
189 |
+
label=config_data.Labels_TARGET_SCORE_OPE_LABEL,
|
190 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(0, 1.0),
|
191 |
+
show_label=True,
|
192 |
+
interactive=True,
|
193 |
+
visible=False,
|
194 |
+
render=True,
|
195 |
+
elem_classes="number-container",
|
196 |
+
)
|
197 |
+
|
198 |
+
target_score_con = number_create_ui(
|
199 |
+
value=config_data.Values_TARGET_SCORES[1],
|
200 |
+
minimum=0.0,
|
201 |
+
maximum=1.0,
|
202 |
+
step=0.000001,
|
203 |
+
label=config_data.Labels_TARGET_SCORE_CON_LABEL,
|
204 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(0, 1.0),
|
205 |
+
show_label=True,
|
206 |
+
interactive=True,
|
207 |
+
visible=False,
|
208 |
+
render=True,
|
209 |
+
elem_classes="number-container",
|
210 |
+
)
|
211 |
+
|
212 |
+
target_score_ext = number_create_ui(
|
213 |
+
value=config_data.Values_TARGET_SCORES[2],
|
214 |
+
minimum=0.0,
|
215 |
+
maximum=1.0,
|
216 |
+
step=0.000001,
|
217 |
+
label=config_data.Labels_TARGET_SCORE_EXT_LABEL,
|
218 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(0, 1.0),
|
219 |
+
show_label=True,
|
220 |
+
interactive=True,
|
221 |
+
visible=False,
|
222 |
+
render=True,
|
223 |
+
elem_classes="number-container",
|
224 |
+
)
|
225 |
+
|
226 |
+
target_score_agr = number_create_ui(
|
227 |
+
value=config_data.Values_TARGET_SCORES[3],
|
228 |
+
minimum=0.0,
|
229 |
+
maximum=1.0,
|
230 |
+
step=0.000001,
|
231 |
+
label=config_data.Labels_TARGET_SCORE_AGR_LABEL,
|
232 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(0, 1.0),
|
233 |
+
show_label=True,
|
234 |
+
interactive=True,
|
235 |
+
visible=False,
|
236 |
+
render=True,
|
237 |
+
elem_classes="number-container",
|
238 |
+
)
|
239 |
+
|
240 |
+
target_score_nneu = number_create_ui(
|
241 |
+
value=config_data.Values_TARGET_SCORES[4],
|
242 |
+
minimum=0.0,
|
243 |
+
maximum=1.0,
|
244 |
+
step=0.000001,
|
245 |
+
label=config_data.Labels_TARGET_SCORE_NNEU_LABEL,
|
246 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(0, 1.0),
|
247 |
+
show_label=True,
|
248 |
+
interactive=True,
|
249 |
+
visible=False,
|
250 |
+
render=True,
|
251 |
+
elem_classes="number-container",
|
252 |
+
)
|
253 |
+
|
254 |
+
equal_coefficient = number_create_ui(
|
255 |
+
value=0.5,
|
256 |
+
minimum=0.0,
|
257 |
+
maximum=1.0,
|
258 |
+
step=0.01,
|
259 |
+
label=config_data.Labels_EQUAL_COEFFICIENT_LABEL,
|
260 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(0, 1.0),
|
261 |
+
show_label=True,
|
262 |
+
interactive=True,
|
263 |
+
visible=False,
|
264 |
+
render=True,
|
265 |
+
elem_classes="number-container",
|
266 |
+
)
|
267 |
+
|
268 |
+
df_correlation_coefficients = read_csv_file(
|
269 |
+
config_data.Links_CAR_CHARACTERISTICS,
|
270 |
+
["Trait", "Style and performance", "Safety and practicality"],
|
271 |
+
)
|
272 |
+
|
273 |
+
number_priority = number_create_ui(
|
274 |
+
value=1,
|
275 |
+
minimum=1,
|
276 |
+
maximum=df_correlation_coefficients.columns.size,
|
277 |
+
step=1,
|
278 |
+
label=config_data.Labels_NUMBER_PRIORITY_LABEL,
|
279 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(
|
280 |
+
1, df_correlation_coefficients.columns.size
|
281 |
+
),
|
282 |
+
show_label=True,
|
283 |
+
interactive=True,
|
284 |
+
visible=False,
|
285 |
+
render=True,
|
286 |
+
elem_classes="number-container",
|
287 |
+
)
|
288 |
+
|
289 |
+
number_importance_traits = number_create_ui(
|
290 |
+
value=1,
|
291 |
+
minimum=1,
|
292 |
+
maximum=5,
|
293 |
+
step=1,
|
294 |
+
label=config_data.Labels_NUMBER_IMPORTANCE_TRAITS_LABEL,
|
295 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(1, 5),
|
296 |
+
show_label=True,
|
297 |
+
interactive=True,
|
298 |
+
visible=False,
|
299 |
+
render=True,
|
300 |
+
elem_classes="number-container",
|
301 |
+
)
|
302 |
+
|
303 |
+
threshold_consumer_preferences = number_create_ui(
|
304 |
+
value=0.55,
|
305 |
+
minimum=0.0,
|
306 |
+
maximum=1.0,
|
307 |
+
step=0.01,
|
308 |
+
label=config_data.Labels_THRESHOLD_CONSUMER_PREFERENCES_LABEL,
|
309 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(0, 1.0),
|
310 |
+
show_label=True,
|
311 |
+
interactive=True,
|
312 |
+
visible=False,
|
313 |
+
render=True,
|
314 |
+
elem_classes="number-container",
|
315 |
+
)
|
316 |
+
|
317 |
+
dropdown_candidates = dropdown_create_ui(
|
318 |
+
label=f"Potential candidates by professional responsibilities ({len(config_data.Settings_DROPDOWN_CANDIDATES)})",
|
319 |
+
info=config_data.InformationMessages_DROPDOWN_CANDIDATES_INFO,
|
320 |
+
choices=config_data.Settings_DROPDOWN_CANDIDATES,
|
321 |
+
value=config_data.Settings_DROPDOWN_CANDIDATES[0],
|
322 |
+
visible=False,
|
323 |
+
elem_classes="dropdown-container",
|
324 |
+
)
|
325 |
+
|
326 |
+
df_traits_priority_for_professions = read_csv_file(
|
327 |
+
config_data.Links_PROFESSIONS
|
328 |
+
)
|
329 |
+
weights_professions, interactive_professions = extract_profession_weights(
|
330 |
+
df_traits_priority_for_professions,
|
331 |
+
config_data.Settings_DROPDOWN_CANDIDATES[0],
|
332 |
+
)
|
333 |
+
|
334 |
+
number_openness = number_create_ui(
|
335 |
+
value=weights_professions[0],
|
336 |
+
minimum=config_data.Values_0_100[0],
|
337 |
+
maximum=config_data.Values_0_100[1],
|
338 |
+
step=1,
|
339 |
+
label=config_data.Labels_NUMBER_IMPORTANCE_OPE_LABEL,
|
340 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(
|
341 |
+
config_data.Values_0_100[0], config_data.Values_0_100[1]
|
342 |
+
),
|
343 |
+
show_label=True,
|
344 |
+
interactive=interactive_professions,
|
345 |
+
visible=False,
|
346 |
+
render=True,
|
347 |
+
elem_classes="number-container",
|
348 |
+
)
|
349 |
+
|
350 |
+
number_conscientiousness = number_create_ui(
|
351 |
+
value=weights_professions[1],
|
352 |
+
minimum=config_data.Values_0_100[0],
|
353 |
+
maximum=config_data.Values_0_100[1],
|
354 |
+
step=1,
|
355 |
+
label=config_data.Labels_NUMBER_IMPORTANCE_CON_LABEL,
|
356 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(
|
357 |
+
config_data.Values_0_100[0], config_data.Values_0_100[1]
|
358 |
+
),
|
359 |
+
show_label=True,
|
360 |
+
interactive=interactive_professions,
|
361 |
+
visible=False,
|
362 |
+
render=True,
|
363 |
+
elem_classes="number-container",
|
364 |
+
)
|
365 |
+
|
366 |
+
number_extraversion = number_create_ui(
|
367 |
+
value=weights_professions[2],
|
368 |
+
minimum=config_data.Values_0_100[0],
|
369 |
+
maximum=config_data.Values_0_100[1],
|
370 |
+
step=1,
|
371 |
+
label=config_data.Labels_NUMBER_IMPORTANCE_EXT_LABEL,
|
372 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(
|
373 |
+
config_data.Values_0_100[0], config_data.Values_0_100[1]
|
374 |
+
),
|
375 |
+
show_label=True,
|
376 |
+
interactive=interactive_professions,
|
377 |
+
visible=False,
|
378 |
+
render=True,
|
379 |
+
elem_classes="number-container",
|
380 |
+
)
|
381 |
+
|
382 |
+
number_agreeableness = number_create_ui(
|
383 |
+
value=weights_professions[3],
|
384 |
+
minimum=config_data.Values_0_100[0],
|
385 |
+
maximum=config_data.Values_0_100[1],
|
386 |
+
step=1,
|
387 |
+
label=config_data.Labels_NUMBER_IMPORTANCE_AGR_LABEL,
|
388 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(
|
389 |
+
config_data.Values_0_100[0], config_data.Values_0_100[1]
|
390 |
+
),
|
391 |
+
show_label=True,
|
392 |
+
interactive=interactive_professions,
|
393 |
+
visible=False,
|
394 |
+
render=True,
|
395 |
+
elem_classes="number-container",
|
396 |
+
)
|
397 |
+
|
398 |
+
number_non_neuroticism = number_create_ui(
|
399 |
+
value=weights_professions[4],
|
400 |
+
minimum=config_data.Values_0_100[0],
|
401 |
+
maximum=config_data.Values_0_100[1],
|
402 |
+
step=1,
|
403 |
+
label=config_data.Labels_NUMBER_IMPORTANCE_NNEU_LABEL,
|
404 |
+
info=config_data.InformationMessages_VALUE_FROM_TO_INFO.format(
|
405 |
+
config_data.Values_0_100[0], config_data.Values_0_100[1]
|
406 |
+
),
|
407 |
+
show_label=True,
|
408 |
+
interactive=interactive_professions,
|
409 |
+
visible=False,
|
410 |
+
render=True,
|
411 |
+
elem_classes="number-container",
|
412 |
+
)
|
413 |
+
|
414 |
+
calculate_practical_task = button(
|
415 |
+
config_data.OtherMessages_CALCULATE_PRACTICAL_TASK,
|
416 |
+
True,
|
417 |
+
1,
|
418 |
+
"./images/pt.ico",
|
419 |
+
False,
|
420 |
+
"calculate_practical_task",
|
421 |
+
)
|
422 |
+
|
423 |
+
with gr.Row(
|
424 |
+
visible=False,
|
425 |
+
render=True,
|
426 |
+
variant="default",
|
427 |
+
) as sorted_videos:
|
428 |
+
with gr.Column(scale=1, visible=False, render=True) as sorted_videos_column:
|
429 |
+
practical_task_sorted = dataframe(visible=False)
|
430 |
+
|
431 |
+
with gr.Accordion(
|
432 |
+
label=config_data.Labels_NOTE_MBTI_LABEL,
|
433 |
+
open=False,
|
434 |
+
visible=False,
|
435 |
+
) as mbti_accordion:
|
436 |
+
mbti_description = gr.HTML(value=MBTI_DESCRIPTION, visible=False)
|
437 |
+
|
438 |
+
mbti_description_data = dataframe(
|
439 |
+
headers=MBTI_DATA.columns.tolist(),
|
440 |
+
values=MBTI_DATA.values.tolist(),
|
441 |
+
visible=False,
|
442 |
+
elem_classes="mbti-dataframe",
|
443 |
+
)
|
444 |
+
|
445 |
+
csv_practical_task_sorted = files_create_ui(
|
446 |
+
None,
|
447 |
+
"single",
|
448 |
+
[".csv"],
|
449 |
+
config_data.OtherMessages_EXPORT_PS,
|
450 |
+
True,
|
451 |
+
False,
|
452 |
+
False,
|
453 |
+
"csv-container",
|
454 |
+
)
|
455 |
+
|
456 |
+
with gr.Column(
|
457 |
+
scale=1,
|
458 |
+
visible=False,
|
459 |
+
render=True,
|
460 |
+
elem_classes="video-column-container",
|
461 |
+
) as video_sorted_column:
|
462 |
+
video_sorted = video_create_ui(
|
463 |
+
visible=False, elem_classes="video-sorted-container"
|
464 |
+
)
|
465 |
+
|
466 |
+
with gr.Column(scale=1, visible=False, render=True) as metadata:
|
467 |
+
with gr.Row(
|
468 |
+
visible=False, render=True, variant="default"
|
469 |
+
) as metadata_1:
|
470 |
+
with gr.Row(
|
471 |
+
visible=False,
|
472 |
+
render=True,
|
473 |
+
variant="default",
|
474 |
+
elem_classes="name-container",
|
475 |
+
) as name_row:
|
476 |
+
name_logo = gr.Image(
|
477 |
+
value="images/name.svg",
|
478 |
+
container=False,
|
479 |
+
interactive=False,
|
480 |
+
show_label=False,
|
481 |
+
visible=False,
|
482 |
+
show_download_button=False,
|
483 |
+
elem_classes="metadata_name-logo",
|
484 |
+
show_fullscreen_button=False,
|
485 |
+
)
|
486 |
+
|
487 |
+
name = textbox_create_ui(
|
488 |
+
"First name",
|
489 |
+
"text",
|
490 |
+
"First name",
|
491 |
+
None,
|
492 |
+
None,
|
493 |
+
1,
|
494 |
+
True,
|
495 |
+
False,
|
496 |
+
False,
|
497 |
+
False,
|
498 |
+
1,
|
499 |
+
False,
|
500 |
+
)
|
501 |
+
|
502 |
+
with gr.Row(
|
503 |
+
visible=False,
|
504 |
+
render=True,
|
505 |
+
variant="default",
|
506 |
+
elem_classes="surname-container",
|
507 |
+
) as surname_row:
|
508 |
+
surname_logo = gr.Image(
|
509 |
+
value="images/name.svg",
|
510 |
+
container=False,
|
511 |
+
interactive=False,
|
512 |
+
show_label=False,
|
513 |
+
visible=False,
|
514 |
+
show_download_button=False,
|
515 |
+
elem_classes="metadata_surname-logo",
|
516 |
+
show_fullscreen_button=False,
|
517 |
+
)
|
518 |
+
|
519 |
+
surname = textbox_create_ui(
|
520 |
+
"Last name",
|
521 |
+
"text",
|
522 |
+
"Last name",
|
523 |
+
None,
|
524 |
+
None,
|
525 |
+
1,
|
526 |
+
True,
|
527 |
+
False,
|
528 |
+
False,
|
529 |
+
False,
|
530 |
+
1,
|
531 |
+
False,
|
532 |
+
)
|
533 |
+
with gr.Row(
|
534 |
+
visible=False, render=True, variant="default"
|
535 |
+
) as metadata_2:
|
536 |
+
with gr.Row(
|
537 |
+
visible=False,
|
538 |
+
render=True,
|
539 |
+
variant="default",
|
540 |
+
elem_classes="email-container",
|
541 |
+
) as email_row:
|
542 |
+
email_logo = gr.Image(
|
543 |
+
value="images/email.svg",
|
544 |
+
container=False,
|
545 |
+
interactive=False,
|
546 |
+
show_label=False,
|
547 |
+
visible=False,
|
548 |
+
show_download_button=False,
|
549 |
+
elem_classes="metadata_email-logo",
|
550 |
+
show_fullscreen_button=False,
|
551 |
+
)
|
552 |
+
|
553 |
+
email = textbox_create_ui(
|
554 |
+
"example@example.com",
|
555 |
+
"email",
|
556 |
+
"Email",
|
557 |
+
None,
|
558 |
+
None,
|
559 |
+
1,
|
560 |
+
True,
|
561 |
+
False,
|
562 |
+
False,
|
563 |
+
False,
|
564 |
+
1,
|
565 |
+
False,
|
566 |
+
)
|
567 |
+
|
568 |
+
with gr.Row(
|
569 |
+
visible=False,
|
570 |
+
render=True,
|
571 |
+
variant="default",
|
572 |
+
elem_classes="phone-container",
|
573 |
+
) as phone_row:
|
574 |
+
phone_logo = gr.Image(
|
575 |
+
value="images/phone.svg",
|
576 |
+
container=False,
|
577 |
+
interactive=False,
|
578 |
+
show_label=False,
|
579 |
+
visible=False,
|
580 |
+
show_download_button=False,
|
581 |
+
elem_classes="metadata_phone-logo",
|
582 |
+
show_fullscreen_button=False,
|
583 |
+
)
|
584 |
+
|
585 |
+
phone = textbox_create_ui(
|
586 |
+
"+1 (555) 123-4567",
|
587 |
+
"text",
|
588 |
+
"Phone number",
|
589 |
+
None,
|
590 |
+
None,
|
591 |
+
1,
|
592 |
+
True,
|
593 |
+
False,
|
594 |
+
False,
|
595 |
+
False,
|
596 |
+
1,
|
597 |
+
False,
|
598 |
+
)
|
599 |
+
|
600 |
+
practical_subtasks_selected = gr.JSON(
|
601 |
+
value={
|
602 |
+
str(task): supported_practical_tasks.get(task, [None])[0]
|
603 |
+
for task in supported_practical_tasks.keys()
|
604 |
+
},
|
605 |
+
visible=False,
|
606 |
+
render=True,
|
607 |
+
)
|
608 |
+
|
609 |
+
in_development = html_message(
|
610 |
+
config_data.InformationMessages_NOTI_IN_DEV, False, False
|
611 |
+
)
|
612 |
+
|
613 |
+
return (
|
614 |
+
description,
|
615 |
+
step_1,
|
616 |
+
notifications,
|
617 |
+
files,
|
618 |
+
video,
|
619 |
+
examples,
|
620 |
+
calculate_pt_scores,
|
621 |
+
clear_app,
|
622 |
+
pt_scores,
|
623 |
+
csv_pt_scores,
|
624 |
+
step_2,
|
625 |
+
practical_tasks,
|
626 |
+
practical_subtasks,
|
627 |
+
settings_practical_tasks,
|
628 |
+
dropdown_mbti,
|
629 |
+
threshold_mbti,
|
630 |
+
threshold_professional_skills,
|
631 |
+
dropdown_professional_skills,
|
632 |
+
target_score_ope,
|
633 |
+
target_score_con,
|
634 |
+
target_score_ext,
|
635 |
+
target_score_agr,
|
636 |
+
target_score_nneu,
|
637 |
+
equal_coefficient,
|
638 |
+
number_priority,
|
639 |
+
number_importance_traits,
|
640 |
+
threshold_consumer_preferences,
|
641 |
+
dropdown_candidates,
|
642 |
+
number_openness,
|
643 |
+
number_conscientiousness,
|
644 |
+
number_extraversion,
|
645 |
+
number_agreeableness,
|
646 |
+
number_non_neuroticism,
|
647 |
+
calculate_practical_task,
|
648 |
+
practical_subtasks_selected,
|
649 |
+
practical_tasks_column,
|
650 |
+
sorted_videos,
|
651 |
+
sorted_videos_column,
|
652 |
+
practical_task_sorted,
|
653 |
+
csv_practical_task_sorted,
|
654 |
+
mbti_accordion,
|
655 |
+
mbti_description,
|
656 |
+
mbti_description_data,
|
657 |
+
video_sorted_column,
|
658 |
+
video_sorted,
|
659 |
+
metadata,
|
660 |
+
metadata_1,
|
661 |
+
name_row,
|
662 |
+
name_logo,
|
663 |
+
name,
|
664 |
+
surname_row,
|
665 |
+
surname_logo,
|
666 |
+
surname,
|
667 |
+
metadata_2,
|
668 |
+
email_row,
|
669 |
+
email_logo,
|
670 |
+
email,
|
671 |
+
phone_row,
|
672 |
+
phone_logo,
|
673 |
+
phone,
|
674 |
+
in_development,
|
675 |
+
)
|
676 |
+
|
677 |
+
|
678 |
+
def about_app_tab():
|
679 |
+
return gr.HTML(value=APP)
|
680 |
+
|
681 |
+
|
682 |
+
def about_authors_tab():
|
683 |
+
return gr.HTML(value=AUTHORS)
|
684 |
+
|
685 |
+
|
686 |
+
def requirements_app_tab():
|
687 |
+
requirements_df = read_requirements_to_df()
|
688 |
+
|
689 |
+
return dataframe(
|
690 |
+
headers=requirements_df.columns.tolist(),
|
691 |
+
values=requirements_df.values.tolist(),
|
692 |
+
visible=True,
|
693 |
+
elem_classes="requirements-dataframe",
|
694 |
+
)
|
app/utils.py
ADDED
@@ -0,0 +1,86 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: utils.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: Utility functions.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
import pandas as pd
|
9 |
+
|
10 |
+
# Importing necessary components for the Gradio app
|
11 |
+
from app.config import config_data
|
12 |
+
|
13 |
+
|
14 |
+
def get_language_settings(language):
|
15 |
+
language_mappings = {
|
16 |
+
"english": (0, config_data.Settings_LANGUAGES_EN),
|
17 |
+
"Π°Π½Π³Π»ΠΈΠΉΡΠΊΠΈΠΉ": (0, config_data.Settings_LANGUAGES_EN),
|
18 |
+
"russian": (1, config_data.Settings_LANGUAGES_RU),
|
19 |
+
"ΡΡΡΡΠΊΠΈΠΉ": (1, config_data.Settings_LANGUAGES_RU),
|
20 |
+
}
|
21 |
+
|
22 |
+
normalized_language = language.lower()
|
23 |
+
|
24 |
+
lang_id, choices = language_mappings.get(
|
25 |
+
normalized_language, (0, config_data.Settings_LANGUAGES_EN)
|
26 |
+
)
|
27 |
+
|
28 |
+
return lang_id, choices
|
29 |
+
|
30 |
+
|
31 |
+
def preprocess_scores_df(df, name):
|
32 |
+
df.index.name = name
|
33 |
+
df.index += 1
|
34 |
+
df.index = df.index.map(str)
|
35 |
+
|
36 |
+
return df
|
37 |
+
|
38 |
+
|
39 |
+
def read_csv_file(file_path, drop_columns=[]):
|
40 |
+
df = pd.read_csv(file_path)
|
41 |
+
|
42 |
+
if len(drop_columns) != 0:
|
43 |
+
df = pd.DataFrame(df.drop(drop_columns, axis=1))
|
44 |
+
|
45 |
+
return preprocess_scores_df(df, "ID")
|
46 |
+
|
47 |
+
|
48 |
+
def round_numeric_values(x):
|
49 |
+
if isinstance(x, (int, float)):
|
50 |
+
return round(x, 3)
|
51 |
+
|
52 |
+
return x
|
53 |
+
|
54 |
+
|
55 |
+
def apply_rounding_and_rename_columns(df):
|
56 |
+
df_rounded = df.rename(
|
57 |
+
columns={
|
58 |
+
"Openness": "OPE",
|
59 |
+
"Conscientiousness": "CON",
|
60 |
+
"Extraversion": "EXT",
|
61 |
+
"Agreeableness": "AGR",
|
62 |
+
"Non-Neuroticism": "NNEU",
|
63 |
+
}
|
64 |
+
)
|
65 |
+
|
66 |
+
columns_to_round = df_rounded.columns[1:]
|
67 |
+
df_rounded[columns_to_round] = df_rounded[columns_to_round].applymap(
|
68 |
+
round_numeric_values
|
69 |
+
)
|
70 |
+
|
71 |
+
return df_rounded
|
72 |
+
|
73 |
+
|
74 |
+
def extract_profession_weights(df, dropdown_candidates):
|
75 |
+
try:
|
76 |
+
weights_professions = df.loc[df["Profession"] == dropdown_candidates, :].values[
|
77 |
+
0
|
78 |
+
][1:]
|
79 |
+
interactive_professions = False
|
80 |
+
except Exception:
|
81 |
+
weights_professions = [0] * 5
|
82 |
+
interactive_professions = True
|
83 |
+
else:
|
84 |
+
weights_professions = list(map(int, weights_professions))
|
85 |
+
|
86 |
+
return weights_professions, interactive_professions
|
app/video_metadata.py
ADDED
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
File: video_metadata.py
|
3 |
+
Author: Elena Ryumina and Dmitry Ryumin
|
4 |
+
Description: Utility functions for working with video metadata.
|
5 |
+
License: MIT License
|
6 |
+
"""
|
7 |
+
|
8 |
+
import yaml
|
9 |
+
from typing import List, Dict
|
10 |
+
|
11 |
+
# Importing necessary components for the Gradio app
|
12 |
+
|
13 |
+
|
14 |
+
def load_video_metadata(file_path: str) -> Dict[str, List]:
|
15 |
+
with open(file_path, "r") as file:
|
16 |
+
video_metadata = yaml.safe_load(file) or {}
|
17 |
+
result = {}
|
18 |
+
for key, value in video_metadata.get("video_metadata", {}).items():
|
19 |
+
alias = key.split("_")[0]
|
20 |
+
result[key] = value + [f"video{alias}"]
|
21 |
+
return result
|
22 |
+
|
23 |
+
|
24 |
+
yaml_file_path = "./video_metadata.yaml"
|
25 |
+
video_metadata = load_video_metadata(yaml_file_path)
|
config.toml
ADDED
@@ -0,0 +1,155 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
[AppSettings]
|
2 |
+
APP_VERSION = "0.9.2"
|
3 |
+
CSS_PATH = "app.css"
|
4 |
+
DEFAULT_LANG_ID = 0
|
5 |
+
|
6 |
+
[InformationMessages]
|
7 |
+
DESCRIPTIONS = [
|
8 |
+
"is an open-source framework for Big Five personality traits assessment and HR-processes automatization",
|
9 |
+
"- Π±ΠΈΠ±Π»ΠΈΠΎΡΠ΅ΠΊΠ° Ρ ΠΎΡΠΊΡΡΡΡΠΌ ΠΈΡΡ
ΠΎΠ΄Π½ΡΠΌ ΠΊΠΎΠ΄ΠΎΠΌ Π΄Π»Ρ ΠΎΡΠ΅Π½ΠΈΠ²Π°Π½ΠΈΡ Π±ΠΎΠ»ΡΡΠΎΠΉ ΠΏΡΡΠ΅ΡΠΊΠΈ ΠΊΠ°ΡΠ΅ΡΡΠ² Π»ΠΈΡΠ½ΠΎΡΡΠΈ ΡΠ΅Π»ΠΎΠ²Π΅ΠΊΠ° ΠΈ Π°Π²ΡΠΎΠΌΠ°ΡΠΈΠ·Π°ΡΠΈΠΈ HR-ΠΏΡΠΎΡΠ΅ΡΡΠΎΠ²",
|
10 |
+
]
|
11 |
+
NOTI_VIDEOS = ["Select the video(s)", "ΠΡΠ±Π΅ΡΠΈΡΠ΅ Π²ΠΈΠ΄Π΅ΠΎ"]
|
12 |
+
PRACTICAL_TASKS_INFO = "Choose a practical task"
|
13 |
+
PRACTICAL_SUBTASKS_INFO = "Choose a practical subtask"
|
14 |
+
NOTI_IN_DEV = "In development"
|
15 |
+
DROPDOWN_MBTI_INFO = "What personality type indicator are you interested in?"
|
16 |
+
DROPDOWN_PROFESSIONAL_SKILLS_INFO = "What professional skill are you interested in?"
|
17 |
+
DROPDOWN_DROPDOWN_COLLEAGUES_INFO = "What colleague are you interested in?"
|
18 |
+
DROPDOWN_CANDIDATES_INFO = "What profession are you interested in?"
|
19 |
+
DROPDOWN_LANGUAGES_INFO = "Select the language of the app"
|
20 |
+
VALUE_FROM_TO_INFO = "Set value from {} to {}"
|
21 |
+
SUM_WEIGHTS = "The sum of the weights of the personality traits should be 100, not {}"
|
22 |
+
STEP_1 = [
|
23 |
+
"Step 1: Calculation of personality traits scores",
|
24 |
+
"Π¨Π°Π³ 1: ΠΡΡΠΈΡΠ»Π΅Π½ΠΈΠ΅ ΠΎΡΠ΅Π½ΠΎΠΊ ΠΏΠ΅ΡΡΠΎΠ½Π°Π»ΡΠ½ΡΡ
ΠΊΠ°ΡΠ΅ΡΡΠ² Π»ΠΈΡΠ½ΠΎΡΡΠΈ ΡΠ΅Π»ΠΎΠ²Π΅ΠΊΠ°",
|
25 |
+
]
|
26 |
+
STEP_2 = [
|
27 |
+
"Step 2: Solving practical task",
|
28 |
+
"Π¨Π°Π³ 2: Π Π΅ΡΠ΅Π½ΠΈΠ΅ ΠΏΡΠ°ΠΊΡΠΈΡΠ΅ΡΠΊΠΎΠΉ Π·Π°Π΄Π°ΡΠΈ",
|
29 |
+
]
|
30 |
+
|
31 |
+
[OtherMessages]
|
32 |
+
VIDEO_FILES = ["Video Files", "ΠΠΈΠ΄Π΅ΠΎΡΠ°ΠΉΠ»Ρ"]
|
33 |
+
VIDEO_PLAYER = ["Video Player", "ΠΠΈΠ΄Π΅ΠΎ ΠΏΡΠΎΠΈΠ³ΡΡΠ²Π°ΡΠ΅Π»Ρ"]
|
34 |
+
CALCULATE_PT_SCORES = ["Calculation of Big Five personality traits scores", "ΠΡΡΠΈΡΠ»Π΅Π½ΠΈΠ΅ ΠΏΠΎΠΊΠ°Π·Π°ΡΠ΅Π»Π΅ΠΉ ΠΠΎΠ»ΡΡΠΎΠΉ ΠΏΡΡΠ΅ΡΠΊΠΈ ΠΏΠ΅ΡΡΠΎΠ½Π°Π»ΡΠ½ΡΡ
ΠΊΠ°ΡΠ΅ΡΡΠ² Π»ΠΈΡΠ½ΠΎΡΡΠΈ ΡΠ΅Π»ΠΎΠ²Π΅ΠΊΠ°"]
|
35 |
+
CALCULATE_PT_SCORES_ERR = "Personality traits scores have not been calculated. Try uploading a different file(s)"
|
36 |
+
CALCULATE_PRACTICAL_TASK = "Solving practical task"
|
37 |
+
CLEAR_APP = ["Clear", "Π‘Π±ΡΠΎΡ"]
|
38 |
+
EXAMPLES_APP = ["Examples", "ΠΡΠΈΠΌΠ΅ΡΡ"]
|
39 |
+
EXPORT_PT_SCORES = [
|
40 |
+
"Export Big Five personality traits to a CSV file",
|
41 |
+
"ΠΠΊΡΠΏΠΎΡΡ ΠΏΠΎΠΊΠ°Π·Π°ΡΠ΅Π»Π΅ΠΉ ΠΠΎΠ»ΡΡΠΎΠΉ ΠΏΡΡΠ΅ΡΠΊΠΈ ΠΏΠ΅ΡΡΠΎΠ½Π°Π»ΡΠ½ΡΡ
ΠΊΠ°ΡΠ΅ΡΡΠ² Π»ΠΈΡΠ½ΠΎΡΡΠΈ ΡΠ΅Π»ΠΎΠ²Π΅ΠΊΠ° Π² CSV ΡΠ°ΠΉΠ»"]
|
42 |
+
EXPORT_PG = "Export ranking professional groups results to a CSV file"
|
43 |
+
EXPORT_PS = "Export ranking professional skill results to a CSV file"
|
44 |
+
EXPORT_WT = "Export ranking effective work teams results to a CSV file"
|
45 |
+
EXPORT_CP = "Export consumer preferences for industrial goods results to a CSV file"
|
46 |
+
EXPORT_MBTI = "Export ranking personality type results to a CSV file"
|
47 |
+
NOTI_CALCULATE = ["You can calculate Big Five personality traits scores", "ΠΡ ΠΌΠΎΠΆΠ΅ΡΠ΅ ΡΠ°ΡΡΡΠΈΡΠ°ΡΡ ΠΏΠΎΠΊΠ°Π·Π°ΡΠ΅Π»ΠΈ ΠΠΎΠ»ΡΡΠΎΠΉ ΠΏΡΡΠ΅ΡΠΊΠΈ ΠΏΠ΅ΡΡΠΎΠ½Π°Π»ΡΠ½ΡΡ
ΠΊΠ°ΡΠ΅ΡΡΠ² Π»ΠΈΡΠ½ΠΎΡΡΠΈ ΡΠ΅Π»ΠΎΠ²Π΅ΠΊΠ°"]
|
48 |
+
|
49 |
+
[Labels]
|
50 |
+
APP_LABEL = ["β App", "β ΠΡΠΈΠ»ΠΎΠΆΠ΅Π½ΠΈΠ΅"]
|
51 |
+
ABOUT_APP_LABEL = ["π‘ About the App", "π‘ Π ΠΏΡΠΈΠ»ΠΎΠΆΠ΅Π½ΠΈΠΈ"]
|
52 |
+
ABOUT_AUTHORS_LABEL = ["π About the Authors", "π ΠΠ± Π°Π²ΡΠΎΡΠ°Ρ
"]
|
53 |
+
REQUIREMENTS_LABEL = ["π Requirements", "π ΠΠ°Π²ΠΈΡΠΈΠΌΠΎΡΡΠΈ"]
|
54 |
+
PRACTICAL_TASKS_LABEL = "Practical tasks"
|
55 |
+
PRACTICAL_SUBTASKS_LABEL = "Practical subtasks"
|
56 |
+
THRESHOLD_MBTI_LABEL = "Polarity traits threshold"
|
57 |
+
THRESHOLD_PROFESSIONAL_SKILLS_LABEL = "Polarity traits threshold"
|
58 |
+
TARGET_SCORE_OPE_LABEL = "Openness target score"
|
59 |
+
TARGET_SCORE_CON_LABEL = "Conscientiousness target score"
|
60 |
+
TARGET_SCORE_EXT_LABEL = "Extraversion target score"
|
61 |
+
TARGET_SCORE_AGR_LABEL = "Agreeableness target score"
|
62 |
+
TARGET_SCORE_NNEU_LABEL = "Non-Neuroticism target score"
|
63 |
+
EQUAL_COEFFICIENT_LABEL = "Equal coefficient"
|
64 |
+
NUMBER_PRIORITY_LABEL = "Priority number"
|
65 |
+
NUMBER_IMPORTANCE_TRAITS_LABEL = "Importance traits number"
|
66 |
+
NUMBER_IMPORTANCE_OPE_LABEL = "Openness weight"
|
67 |
+
NUMBER_IMPORTANCE_CON_LABEL = "Conscientiousness weight"
|
68 |
+
NUMBER_IMPORTANCE_EXT_LABEL = "Extraversion weight"
|
69 |
+
NUMBER_IMPORTANCE_AGR_LABEL = "Agreeableness weight"
|
70 |
+
NUMBER_IMPORTANCE_NNEU_LABEL = "Non-Neuroticism weight"
|
71 |
+
THRESHOLD_CONSUMER_PREFERENCES_LABEL = "Polarity traits threshold"
|
72 |
+
LANGUAGES_LABEL = "Languages"
|
73 |
+
NOTE_MBTI_LABEL = "Note: What are Personality Types of MBTI?"
|
74 |
+
APP_VERSION = ["Version", "ΠΠ΅ΡΡΠΈΡ"]
|
75 |
+
|
76 |
+
[TabCreators]
|
77 |
+
"β App" = "app_tab"
|
78 |
+
"π‘ About the App" = "about_app_tab"
|
79 |
+
"π About the Authors" = "about_authors_tab"
|
80 |
+
"π Requirements" = "requirements_app_tab"
|
81 |
+
|
82 |
+
[Filenames]
|
83 |
+
PT_SCORES = "personality_traits_scores.csv"
|
84 |
+
PT_SKILLS_SCORES = "personality_skills_scores.csv"
|
85 |
+
COLLEAGUE_RANKING = "_colleague_ranking.csv"
|
86 |
+
CAR_CHARACTERISTICS = "auto_characteristics_priorities.csv"
|
87 |
+
MDA_CATEGORIES = "divice_characteristics_priorities.csv"
|
88 |
+
POTENTIAL_CANDIDATES = "potential_candidates.csv"
|
89 |
+
MBTI_JOB = "mbti_job_match.csv"
|
90 |
+
|
91 |
+
[Dataframes]
|
92 |
+
PT_SCORES = [
|
93 |
+
[
|
94 |
+
"Person ID", "Path", "Openness", "Conscientiousness", "Extraversion", "Agreeableness", "Non-Neuroticism"
|
95 |
+
],
|
96 |
+
[
|
97 |
+
"ΠΠ΄Π΅Π½ΡΠΈΡΠΈΠΊΠ°ΡΠΎΡ", "ΠΠΌΡ ΡΠ°ΠΉΠ»Π°",
|
98 |
+
"ΠΡΠΊΡΡΡΠΎΡΡΡ ΠΊ ΠΎΠΏΡΡΡ", "ΠΠΎΠ±ΡΠΎΡΠΎΠ²Π΅ΡΡΠ½ΠΎΡΡΡ", "ΠΠΊΡΡΡΠΎΠ²Π΅ΡΡΠΈΡ", "ΠΠΎΠ±ΡΠΎΠΆΠ΅Π»Π°ΡΠ΅Π»ΡΠ½ΠΎΡΡΡ", "ΠΠΌΠΎΡΠΈΠΎΠ½Π°Π»ΡΠ½Π°Ρ ΡΡΠ°Π±ΠΈΠ»ΡΠ½ΠΎΡΡΡ"
|
99 |
+
]
|
100 |
+
]
|
101 |
+
|
102 |
+
[Images]
|
103 |
+
LANGUAGES = ["UK.png", "RU.png"]
|
104 |
+
|
105 |
+
[StaticPaths]
|
106 |
+
IMAGES = "images/"
|
107 |
+
|
108 |
+
[Settings]
|
109 |
+
LANGUAGES_EN = ["English", "Russian"]
|
110 |
+
LANGUAGES_RU = ["ΠΠ½Π³Π»ΠΈΠΉΡΠΊΠΈΠΉ", "Π ΡΡΡΠΊΠΈΠΉ"]
|
111 |
+
SHORT_PROFESSIONAL_SKILLS = ["OPE", "CON", "EXT", "AGR", "NNEU"]
|
112 |
+
DROPDOWN_PROFESSIONAL_SKILLS = ["Analytical", "Interactive", "Routine", "Non-Routine"]
|
113 |
+
DROPDOWN_COLLEAGUES = ["major", "minor"]
|
114 |
+
DROPDOWN_CANDIDATES = [
|
115 |
+
"Managers/executives",
|
116 |
+
"Entrepreneurship",
|
117 |
+
"Social/Non profit making professions",
|
118 |
+
"Public sector professions",
|
119 |
+
"Scientists/researchers, and engineers",
|
120 |
+
"Custom",
|
121 |
+
]
|
122 |
+
DROPDOWN_MBTI = [
|
123 |
+
"The Inspector (ISTJ): Accountant, Auditor, Budget Analyst, Financial Manager, Developer, Systems Analyst, Librarian etc.",
|
124 |
+
"The Protector (ISFJ): Nurse, Doctor, Veterinarian or Veterinary Nurse/Assistant, Social Worker, Agricultural or Food Scientist, Secretary, Driver, etc.",
|
125 |
+
"The Counselor (INFJ): Psychologist, Human Resources Professional, Office Manager, Training Specialist, Graphic Designer, etc.",
|
126 |
+
"The Mastermind (INTJ): Animator, Architect, Content Writer, Photographer, TV Journalist, Video Editor, Business Development, Executive, Professor, etc.",
|
127 |
+
"The Crafter (ISTP): Engineer, Technician, Construction Worker, Inspector, Forensic Scientist, Software Engineer, Computer Programmer, etc.",
|
128 |
+
"The Composer (ISFP): Marketing Assistant, Dancer, Chef, Office Administrator, Artist, Interior Designer, Legal Secretary, Nurse, etc.",
|
129 |
+
"The Healer (INFP): Writer, Multimedia Designer, Customer Relations Manager, Special Education Teacher, Coach, Editor, Fashion Designer, etc.",
|
130 |
+
"The Architect (INTP): Technical Writer, Web Developer, Information Security Analyst, Researcher, Scientist, Lawyer, etc.",
|
131 |
+
"The Promoter (ESTP): Customer Care Specialist, Actor, Personal Trainer, Brand Ambassador, Manager, Entrepreneur, Creative Director, Police Officer, Marketing Officer, Manufacturer, etc.",
|
132 |
+
"The Performer (ESFP): Flight Attendant, Entertainer, Teacher, Public Relations Manager, Sales Representative, Event Planner, etc.",
|
133 |
+
"The Champion (ENFP): Healthcare Professional, Producer, Retail Sales Associate, Customer Service; Screenwriter; TV/Radio Host, etc.",
|
134 |
+
"The Visionary (ENTP): Engineer, Market Researcher, Social Media Manager, Management Analyst, Digital Marketing Executive, Business Consultant, Game Designer/Developer, Sales Manager, etc.",
|
135 |
+
"The Supervisor (ESTJ): Managing Director, Hotel Manager, Finance Officer, Judge, Real Estate Agent, Chief Executive Officer, Chef, Business Development Manager, Telemarketer, etc.",
|
136 |
+
"The Provider (ESFJ): Technical Support Specialist, Account Manager, College Professor, Medical Researcher, Bookkeeper, Photojournalist, etc.",
|
137 |
+
"The Teacher (ENFJ): Public Relations Manager, Sales Manager, Human Resource Director, Art Director, Counselor, etc.",
|
138 |
+
"The Commander (ENTJ): Construction Supervisor, Health Services Administrator, Financial Accountant, Auditor, Lawyer, School Principal, Chemical Engineer, Database Manager, etc.",
|
139 |
+
]
|
140 |
+
DROPDOWN_MBTI_DEL_COLS = ["EI", "SN", "TF", "JP", "Match"]
|
141 |
+
SHOW_VIDEO_METADATA = true
|
142 |
+
SUPPORTED_VIDEO_EXT = ["mp4", "mov", "avi", "flv"]
|
143 |
+
|
144 |
+
[Values]
|
145 |
+
TARGET_SCORES = [0.527886, 0.522337, 0.458468, 0.51761, 0.444649]
|
146 |
+
0_100 = [0, 100]
|
147 |
+
|
148 |
+
[Links]
|
149 |
+
PROFESSIONAL_SKILLS = "https://download.sberdisk.ru/download/file/478678231?token=0qiZwliLtHWWYMv&filename=professional_skills.csv"
|
150 |
+
FINDING_COLLEAGUE = "https://download.sberdisk.ru/download/file/478675819?token=LuB7L1QsEY0UuSs&filename=colleague_ranking.csv"
|
151 |
+
CAR_CHARACTERISTICS = "https://download.sberdisk.ru/download/file/478675818?token=EjfLMqOeK8cfnOu&filename=auto_characteristics.csv"
|
152 |
+
MDA_CATEGORIES = "https://download.sberdisk.ru/download/file/478676690?token=7KcAxPqMpWiYQnx&filename=divice_characteristics.csv"
|
153 |
+
CLOTHING_SC = "https://download.sberdisk.ru/download/file/493644097?token=KGtSGMxjZtWXmBz&filename=df_%D1%81lothing_style_correlation.csv"
|
154 |
+
PROFESSIONS = "https://download.sberdisk.ru/download/file/478675798?token=fF5fNZVpthQlEV0&filename=traits_priority_for_professions.csv"
|
155 |
+
MBTI = "https://download.sberdisk.ru/download/file/493644095?token=EX7hFxNJhMoLumI&filename=df_mbti_correlation.csv"
|
images/AA.jpg
ADDED
images/RU.png
ADDED
images/UK.png
ADDED
images/calculate_pt_scores.ico
ADDED
images/clear.ico
ADDED
images/email.svg
ADDED
images/examples.ico
ADDED
images/name.svg
ADDED
images/phone.svg
ADDED
images/pt.ico
ADDED
practical_tasks.yaml
ADDED
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
- task: "Ranking potential candidates by professional responsibilities"
|
2 |
+
subtasks:
|
3 |
+
- "16 Personality Types of MBTI"
|
4 |
+
- "Professional groups"
|
5 |
+
- "Professional skills"
|
6 |
+
- task: "Forming effective work teams"
|
7 |
+
subtasks:
|
8 |
+
- "Finding a suitable junior colleague"
|
9 |
+
- "Finding a suitable senior colleague"
|
10 |
+
- task: "Predicting consumer preferences for industrial goods"
|
11 |
+
subtasks:
|
12 |
+
- "Car characteristics"
|
13 |
+
- "Mobile device application categories"
|
14 |
+
- "Clothing style correlation"
|
requirements.txt
ADDED
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
gradio==5.5.0
|
2 |
+
spaces==0.30.4
|
3 |
+
PyYAML==6.0.2
|
4 |
+
toml==0.10.2
|
5 |
+
oceanai==1.0.0a43
|
video_metadata.yaml
ADDED
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
video_metadata:
|
2 |
+
1_a6a198e51d073b0.mp4:
|
3 |
+
- Michael
|
4 |
+
- Brown
|
5 |
+
- michael.brown@example.com
|
6 |
+
- "+1 (555) 234-5678"
|
7 |
+
2_9093a4ca3c0c834.mp4:
|
8 |
+
- Emily
|
9 |
+
- Taylor
|
10 |
+
- emily.taylor@example.com
|
11 |
+
- "+1 (555) 456-7890"
|
12 |
+
3_9987232dd677712.mp4:
|
13 |
+
- Jack
|
14 |
+
- Smith
|
15 |
+
- jack.smith@example.com
|
16 |
+
- "+1 (555) 123-4567"
|
17 |
+
4_6af8f60549cdf8d.mp4:
|
18 |
+
- Olivia
|
19 |
+
- Johnson
|
20 |
+
- olivia.johnson@example.com
|
21 |
+
- "+1 (555) 567-8901"
|
22 |
+
5_7b6dc9535953642.mp4:
|
23 |
+
- David
|
24 |
+
- Miller
|
25 |
+
- david.miller@example.com
|
26 |
+
- "+1 (555) 345-6789"
|
27 |
+
6_17a33049c100d3c.mp4:
|
28 |
+
- Sophia
|
29 |
+
- Wilson
|
30 |
+
- sophia.wilson@example.com
|
31 |
+
- "+1 (555) 678-9012"
|