Zekun Wu committed
Commit · a7e2fc8
1 parent: ea8d9fe

update path
Files changed:
- .idea/Multidimensional_Multilevel_Bias_Detection.iml +1 -1
- .idea/misc.xml +1 -1
- README.md +1 -1
- app.py +3 -4
- bias_detector/__init__.py +0 -1
- bias_detector/__pycache__/bias_detector.cpython-311.pyc +0 -0
- stereotype_detector/__init__.py +1 -0
- {bias_detector → stereotype_detector}/__pycache__/__init__.cpython-311.pyc +0 -0
- bias_detector/bias_detector.py → stereotype_detector/stereotype_detector.py +6 -22
.idea/Multidimensional_Multilevel_Bias_Detection.iml
CHANGED
@@ -4,7 +4,7 @@
     <content url="file://$MODULE_DIR$">
       <excludeFolder url="file://$MODULE_DIR$/venv" />
     </content>
-    <orderEntry type="jdk" jdkName="Python 3.9 (
+    <orderEntry type="jdk" jdkName="Python 3.9 (Stereotype_Detection)" jdkType="Python SDK" />
     <orderEntry type="sourceFolder" forTests="false" />
   </component>
 </module>
.idea/misc.xml
CHANGED
@@ -1,4 +1,4 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <project version="4">
-  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.9 (
+  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.9 (Stereotype_Detection)" project-jdk-type="Python SDK" />
 </project>
README.md
CHANGED
@@ -1,5 +1,5 @@
 ---
-title:
+title: Stereotype_Detection
 emoji: π
 colorFrom: blue
 colorTo: yellow
app.py
CHANGED
@@ -1,7 +1,7 @@
 import streamlit as st
-from
+from stereotype_detector import Detector
 
-st.title("
+st.title("Stereotype Detection")
 
 EXAMPLES = {
     "Gender": "The baby loved the presence of his caring mommy.",
@@ -34,11 +34,10 @@ def format_results(results, bias_level):
     return formatted
 
 level = st.selectbox("Select the Detection Levels:", ("Sentence","Token"))
-dimension = st.selectbox("Select the Stereotype Dimensions:", ("All","Gender","Religion","Race","Profession"))
 
 if st.button("Load Models"):
     with st.spinner('Loading models...'):
-        st.session_state["detector"] = Detector(level
+        st.session_state["detector"] = Detector(level)
         dummy_sentence = "This is a dummy sentence."
         dummy_result = st.session_state["detector"].predict([dummy_sentence])
         if dummy_result:
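For orientation, a minimal sketch of the model-loading flow app.py implements after this commit; it reuses only the calls visible in the diff above, and the st.success feedback line is an added assumption rather than part of the actual file:

import streamlit as st
from stereotype_detector import Detector

st.title("Stereotype Detection")
level = st.selectbox("Select the Detection Levels:", ("Sentence", "Token"))

if st.button("Load Models"):
    with st.spinner('Loading models...'):
        # The selected level alone now picks the model; the dimension argument is gone.
        st.session_state["detector"] = Detector(level)
        # Warm-up prediction, mirroring the dummy-sentence check in app.py.
        if st.session_state["detector"].predict(["This is a dummy sentence."]):
            st.success("Models loaded.")  # hypothetical feedback message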
bias_detector/__init__.py
DELETED
@@ -1 +0,0 @@
-from .bias_detector import Detector
bias_detector/__pycache__/bias_detector.cpython-311.pyc
DELETED
Binary file (5.28 kB)
stereotype_detector/__init__.py
ADDED
@@ -0,0 +1 @@
+from .stereotype_detector import Detector
{bias_detector → stereotype_detector}/__pycache__/__init__.cpython-311.pyc
RENAMED
File without changes
bias_detector/bias_detector.py → stereotype_detector/stereotype_detector.py
RENAMED
@@ -7,41 +7,25 @@ class Detector:
     A class for detecting various forms of bias in text using pre-trained models.
     """
 
-    def __init__(self, classifier
+    def __init__(self, classifier):
         """
         Initializes the detector with a specific model.
 
         Args:
             classifier (str): The type of classifier to use.
-            model_type (str): The type of the model to use.
         """
         self.classifier = classifier
-        self.model_type = model_type
         self.classifier_model_mapping = {
-            "Token":
-
-            "Race": "wu981526092/Token-Level-Race-Bias-Detector",
-            "Gender": "wu981526092/Token-Level-Gender-Bias-Detector",
-            "Profession": "wu981526092/Token-Level-Profession-Bias-Detector",
-            "Religion": "wu981526092/Token-Level-Religion-Bias-Detector",
-            },
-            "Sentence": {
-            "All": "wu981526092/Sentence-Level-Multidimensional-Bias-Detector",
-            "Religion": "wu981526092/Sentence-Level-Religion-Bias-Detector",
-            "Profession": "wu981526092/Sentence-Level-Profession-Bias-Detector",
-            "Race": "wu981526092/Sentence-Level-Race-Bias-Detector",
-            "Gender": "wu981526092/Sentence-Level-Gender-Bias-Detector",
-            }
+            "Token": "wu981526092/Token-Level-Stereotype-Detector",
+            "Sentence": "wu981526092/Sentence-Level-Stereotype-Detector"
         }
 
         if classifier not in self.classifier_model_mapping:
             raise ValueError(f"Invalid classifier. Expected one of: {list(self.classifier_model_mapping.keys())}")
 
-        if model_type not in self.classifier_model_mapping[classifier]:
-            raise ValueError(
-                f"Invalid model_type for {classifier}. Expected one of: {list(self.classifier_model_mapping[classifier].keys())}")
 
-
+
+        self.model_path = self.classifier_model_mapping[classifier]
         API_token = os.getenv("BIAS_DETECTOR_API_KEY")
         # Using pipeline for inference
         if classifier == 'Token':
@@ -80,7 +64,7 @@ class Detector:
 
 
 if __name__ == '__main__':
-    detector_test = Detector("Sentence"
+    detector_test = Detector("Sentence")
     test_data = ["The girl performed poorly at reading in school.","Sir is an address showing respect for a person. It usually refers to a male."]
     result = detector_test.predict(test_data)
     print(result)
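For reference, a minimal standalone usage sketch of the renamed package, based only on the constructor and predict calls visible in this diff; the placeholder token value and the os.environ.setdefault line are illustrative assumptions, not part of the commit:

import os
from stereotype_detector import Detector

# Hypothetical placeholder; in the Space the token is provided via environment secrets.
os.environ.setdefault("BIAS_DETECTOR_API_KEY", "<your_hf_token>")

# "Sentence" or "Token" selects the corresponding *-Stereotype-Detector model.
detector = Detector("Sentence")
print(detector.predict(["The girl performed poorly at reading in school."]))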