Commit 6f82d3b
Author: Michael Gira
Parent(s): ed7c3f0

Initialize demo
Files changed:
- .editorconfig +9 -0
- .gitignore +160 -0
- README.md +8 -4
- app.py +56 -0
- load_model.py +48 -0
- model.py +250 -0
- models/unprejudiced_full.json +402 -0
- models/unprejudiced_full.pth +3 -0
- models/unprejudiced_ln.json +402 -0
- models/unprejudiced_ln.pth +3 -0
- models/unprejudiced_ln_wpe.json +402 -0
- models/unprejudiced_ln_wpe.pth +3 -0
- models/unprejudiced_ln_wpe_wte.json +402 -0
- models/unprejudiced_ln_wpe_wte.pth +3 -0
- models/unprejudiced_ln_wpe_wte_io.json +402 -0
- models/unprejudiced_ln_wpe_wte_io.pth +3 -0
.editorconfig
ADDED
@@ -0,0 +1,9 @@
root = true

[*]
indent_style = space
tab_width = 4
end_of_line = lf

[*.yml]
tab_width = 2
.gitignore
ADDED
@@ -0,0 +1,160 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
README.md
CHANGED
@@ -1,12 +1,16 @@
 ---
-title: Debiasing
-emoji:
-colorFrom:
+title: Debiasing LMs
+emoji: ⚖️
+colorFrom: yellow
 colorTo: purple
 sdk: gradio
 sdk_version: 3.0.3
 app_file: app.py
-pinned:
+pinned: true
 ---

 Check out the configuration reference at https://huggingface.co/docs/hub/spaces#reference
+
+Official demo for _Debiasing Pre-Trained Language Models via Efficient Fine-Tuning_ published in the [Second Workshop on Language Technology for Equality, Diversity, Inclusion](https://sites.google.com/view/lt-edi-2022) at ACL 2022. [View the code here.](https://github.com/michaelgira23/debiasing-lms)
+
+**WARNING: MODEL OUTPUTS MAY CONTAIN SENSITIVE MATERIAL.**
app.py
ADDED
@@ -0,0 +1,56 @@
import gradio as gr
from load_model import device, load_gpt2, load_unprejudiced, tokenizer

model_names = {
    'gpt2': 'Original GPT-2',
    'unprejudiced_ln': 'LN',
    'unprejudiced_ln_wpe': 'LN + WPE',
    'unprejudiced_ln_wpe_wte': 'LN + WPE + WTE',
    'unprejudiced_ln_wpe_wte_io': 'LN + WPE + WTE + IO',
    'unprejudiced_full': 'FULL',
}

models = []
for model_name in model_names:
    if model_name == 'gpt2':
        models.append(load_gpt2())
    else:
        models.append(load_unprejudiced(model_name))


def generate(prompt):

    encoded_prompt = tokenizer.encode(
        prompt, add_special_tokens=False, return_tensors="pt").to(device)

    output = []
    for model in models:
        output_sequences = model.generate(
            input_ids=encoded_prompt, do_sample=True, max_length=100)
        if len(output_sequences.shape) > 2:
            output_sequences.squeeze_()

        for output_sequence in output_sequences:
            output.append(tokenizer.decode(
                output_sequence, clean_up_tokenization_spaces=True))

    return tuple(output)


demo = gr.Interface(
    fn=generate,
    inputs=gr.Textbox(label='Prompt'),
    outputs=[
        gr.Textbox(label=model_names[model_name]) for model_name in model_names
    ],
    examples=[
        'Classmates of the schoolgirl found her to be',
        'That award-winning chess player is',
        'I think Bangladesh is a',
        'The Bibles I looked at were very'
    ],
    title='Debiasing LMs GPT-2 Demo',
    description='Official demo for _Debiasing Pre-Trained Language Models via Efficient Fine-Tuning_ published in the [Second Workshop on Language Technology for Equality, Diversity, Inclusion](https://sites.google.com/view/lt-edi-2022) at ACL 2022. [View the code here.](https://github.com/michaelgira23/debiasing-lms)<br />WARNING: MODEL OUTPUTS MAY CONTAIN SENSITIVE MATERIAL.'
)

demo.launch()
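app.py loads the original GPT-2 plus the five fine-tuned variants at import time and runs the same sampled generation (do_sample=True, max_length=100) through each of them, so the Gradio interface shows one textbox per model for a single prompt. A minimal sketch of reproducing one of those outputs without the web UI (not part of this commit; it calls load_model directly because importing app would also invoke demo.launch(), and sampling makes each run different):

# Sketch: reproduce one demo output for a single model, outside the Gradio UI.
from load_model import device, load_gpt2, tokenizer

model = load_gpt2()
prompt = tokenizer.encode('I think Bangladesh is a',
                          add_special_tokens=False, return_tensors='pt').to(device)
sequences = model.generate(input_ids=prompt, do_sample=True, max_length=100)
print(tokenizer.decode(sequences[0], clean_up_tokenization_spaces=True))

Swapping load_gpt2() for load_unprejudiced('unprejudiced_ln') runs the same prompt through one of the debiased checkpoints instead.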
load_model.py
ADDED
@@ -0,0 +1,48 @@
import json
import os
import torch
from transformers import GPT2Tokenizer, GPT2LMHeadModel
from model import get_model

device = 'cuda'
models_path = 'models'

tokenizer = GPT2Tokenizer.from_pretrained('gpt2')


def load_gpt2():

    model = GPT2LMHeadModel.from_pretrained('gpt2').to(device)
    return model


def load_unprejudiced(model_name):
    model_path = os.path.join(
        models_path, f'{model_name}.pth'
    )
    model_json_path = os.path.join(
        models_path, f'{model_name}.json'
    )

    with open(model_json_path) as f:
        config = json.loads(f.read())
        combination = config['combination']

    unprejudiced_model = get_model(
        device=device,
        gpt2_name='gpt2',
        in_net=combination['in_net'],
        in_net_init_identity=combination['in_net_init_identity'],
        out_net=combination['out_net'],
        out_net_init_identity=combination['out_net_init_identity'],
        freeze_ln=combination['freeze_ln'],
        freeze_pos=combination['freeze_pos'],
        freeze_wte=combination['freeze_wte'],
        freeze_ff=combination['freeze_ff'],
        freeze_attn=combination['freeze_attn'],
        dup_lm_head=combination['dup_lm_head'],
        dup_lm_head_bias=combination['dup_lm_head_bias']
    )
    checkpoint = torch.load(model_path, map_location=device)
    unprejudiced_model.load_state_dict(checkpoint['model_state_dict'])
    unprejudiced_model = unprejudiced_model.to(device)
    return unprejudiced_model
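load_unprejudiced rebuilds the architecture from the combination block of the matching JSON file and then restores the fine-tuned weights from the .pth checkpoint. Note that device is hard-coded to 'cuda', so every .to(device) and torch.load(..., map_location=device) call assumes a GPU is present. A sketch of a CPU fallback (a possible alternative, not what this commit does):

import torch

# Fall back to CPU when no GPU is available (assumption: the rest of
# load_model.py keeps using this `device` variable unchanged).
device = 'cuda' if torch.cuda.is_available() else 'cpu'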
model.py
ADDED
@@ -0,0 +1,250 @@
import torch
import torch.nn as nn
from torch.nn import CrossEntropyLoss
from transformers import GPT2Tokenizer, GPT2LMHeadModel, GPT2DoubleHeadsModel
from transformers.modeling_outputs import CausalLMOutputWithCrossAttentions
from types import MethodType

tokenizer = GPT2Tokenizer.from_pretrained('gpt2')


def get_model(device='cpu', gpt2_name='gpt2', in_net=False, in_net_init_identity=True, out_net=False, out_net_init_identity=True, freeze_ln=False, freeze_pos=True,
              freeze_wte=True, freeze_ff=True, freeze_attn=True, dup_lm_head=False, dup_lm_head_bias=False):

    # ['gpt2', 'gpt2-medium', 'gpt2-large', 'gpt2-xl']
    model = GPT2LMHeadModel.from_pretrained(gpt2_name).to(device)
    # model = GPT2DoubleHeadsModel.from_pretrained('gpt2')

    """
    Initialize linear input layer
    """

    in_layer_sizes = []
    out_layer_sizes = []
    input_dim = model.config.n_embd
    dropout = 0.1
    orth_gain = 1.41
    # orth_gain = None
    in_net_init_identity = True

    # Model - in_net
    if in_net:
        in_layers = []
        last_output_size = input_dim

        for size in in_layer_sizes:
            layer = nn.Linear(last_output_size, size)
            if orth_gain is not None:
                torch.nn.init.orthogonal_(layer.weight, gain=orth_gain)
            layer.bias.data.zero_()

            in_layers.append(layer)
            in_layers.append(nn.ReLU())
            in_layers.append(nn.Dropout(dropout))
            last_output_size = size

        in_final_linear = nn.Linear(last_output_size, model.config.n_embd)
        # if orth_gain is not None:
        #     torch.nn.init.orthogonal_(in_final_linear.weight, gain=orth_gain)
        # in_final_linear.bias.data.zero_()

        # Initialize final_linear layer to identity transformation
        if in_net_init_identity:
            nn.init.eye_(in_final_linear.weight)
            in_final_linear.bias.data.zero_()

        in_layers.append(in_final_linear)
        in_layers.append(nn.Dropout(dropout))

        model.in_net = nn.Sequential(*in_layers)

        model.in_net.requires_grad = True

    """
    Initialize linear output layer
    """
    if out_net:
        output_dim = model.config.n_embd
        out_layers = []
        last_output_size = model.config.n_embd
        for size in out_layer_sizes:
            out_layers.append(nn.Linear(last_output_size, size))
            out_layers.append(nn.ReLU())
            out_layers.append(nn.Dropout(dropout))
            last_output_size = size

        out_final_linear = nn.Linear(last_output_size, output_dim)

        if out_net_init_identity:
            nn.init.eye_(out_final_linear.weight)
            out_final_linear.bias.data.zero_()

        out_layers.append(out_final_linear)
        model.out_net = nn.Sequential(*out_layers)

        model.out_net.requires_grad = True

    """
    out layer on top of lm_head
    """
    # out_net_top = nn.Linear(model.config.vocab_size, model.config.vocab_size)
    # nn.init.eye_(out_net_top.weight)
    # model.out_net_top = out_net_top
    # model.out_net_top.requires_grad = True

    if dup_lm_head:
        lm_head_new = nn.Linear(model.config.n_embd,
                                model.config.vocab_size, bias=dup_lm_head_bias)
        lm_head_new.weight = torch.nn.Parameter(
            model.lm_head.weight.data.detach().clone(), requires_grad=True)
        # lm_head_new.bias.data.zero_()
        model.lm_head_new = lm_head_new
        model.lm_head_new.requires_grad = True

    """
    Freeze transformer layers
    """

    total_parameters = 0
    target_parameters = 0

    for name, p in model.transformer.named_parameters():
        name = name.lower()

        size = p.size()
        param_count = 1
        for dimension in size:
            param_count *= dimension

        total_parameters += param_count

        if 'ln' in name or 'norm' in name:
            p.requires_grad = not freeze_ln
        elif 'wpe' in name or 'position_embeddings' in name or 'pos_drop' in name:
            p.requires_grad = not freeze_pos
            target_parameters += param_count
        elif 'mlp' in name:
            p.requires_grad = not freeze_ff
        elif 'attn' in name:
            p.requires_grad = not freeze_attn
        elif 'wte' in name:
            p.requires_grad = not freeze_wte
        else:
            p.requires_grad = False

    # print(f'Total params: {total_parameters}')
    # print(
    #     f'Target params: {target_parameters} ({target_parameters / total_parameters * 100:.2f}%)')

    def forward(
        self,
        input_ids=None,
        past_key_values=None,
        attention_mask=None,
        token_type_ids=None,
        position_ids=None,
        head_mask=None,
        inputs_embeds=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        labels=None,
        use_cache=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        **kwargs
    ):
        r"""
        labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
            Labels for language modeling. Note that the labels **are shifted** inside the model, i.e. you can set
            ``labels = input_ids`` Indices are selected in ``[-100, 0, ..., config.vocab_size]`` All labels set to
            ``-100`` are ignored (masked), the loss is only computed for labels in ``[0, ..., config.vocab_size]``
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        # Convert from input ids to word embeddings so that we can apply a linear layer
        x = self.transformer.wte(input_ids)

        try:
            x = self.in_net(x)
        except AttributeError:
            pass

        transformer_outputs = self.transformer(
            inputs_embeds=x,
            past_key_values=past_key_values,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=encoder_attention_mask,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            **kwargs
        )
        hidden_states = transformer_outputs[0]

        # Set device for model parallelism
        if self.model_parallel:
            torch.cuda.set_device(self.transformer.first_device)
            hidden_states = hidden_states.to(self.lm_head.weight.device)

        try:
            hidden_states = self.out_net(hidden_states)
        except AttributeError:
            pass

        try:
            lm_logits = self.lm_head_new(hidden_states)
        except AttributeError:
            lm_logits = self.lm_head(hidden_states)

        # lm_logits = self.out_net_top(lm_logits)

        loss = None
        if labels is not None:
            # Shift so that tokens < n predict n
            shift_logits = lm_logits[..., :-1, :].contiguous()
            shift_labels = labels[..., 1:].contiguous()
            # Flatten the tokens
            loss_fct = CrossEntropyLoss()
            loss = loss_fct(
                shift_logits.view(-1, shift_logits.size(-1)), shift_labels.view(-1))

        if not return_dict:
            output = (lm_logits,) + transformer_outputs[1:]
            return ((loss,) + output) if loss is not None else output

        return CausalLMOutputWithCrossAttentions(
            loss=loss,
            logits=lm_logits,
            past_key_values=transformer_outputs.past_key_values,
            hidden_states=transformer_outputs.hidden_states,
            attentions=transformer_outputs.attentions,
            cross_attentions=transformer_outputs.cross_attentions,
        )

    model.forward = MethodType(forward, model)

    return model


# model = get_model()
'''
only for testing purpose
'''
if __name__ == "__main__":
    model = get_model(gpt2_name='gpt2', in_net=False, in_net_init_identity=True, out_net=False, out_net_init_identity=False, freeze_ln=True, freeze_pos=True,
                      freeze_wte=True, freeze_ff=True, freeze_attn=True)
    for name, p in model.named_parameters():
        if p.requires_grad:
            print(name, p.requires_grad)

    for p in model.lm_head_new.parameters():
        print('lm_head_new', p)

    # for p in model.out_net.parameters():
    #     print('out_net',p)
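get_model wraps a pretrained GPT2LMHeadModel: it optionally adds identity-initialized input/output adapters and a duplicated LM head, freezes transformer parameter groups by name (layer norms, position embeddings, token embeddings, MLPs, attention), and patches in a custom forward that routes hidden states through the added layers. A small sketch (not part of this commit) of checking how much of the model stays trainable for the LN-only combination:

# Sketch: count trainable parameters for the layer-norm-only fine-tuning setup.
from model import get_model

model = get_model(device='cpu', freeze_ln=False, freeze_pos=True,
                  freeze_wte=True, freeze_ff=True, freeze_attn=True)

trainable = sum(p.numel() for p in model.parameters() if p.requires_grad)
total = sum(p.numel() for p in model.parameters())
print(f'{trainable} / {total} parameters trainable ({100 * trainable / total:.4f}%)')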
models/unprejudiced_full.json
ADDED
@@ -0,0 +1,402 @@
{
  "combination": {
    "num_epoches": 2,
    "lr": 0.0002,
    "batch_size": 50,
    "optimizer": "adam",
    "in_net": false,
    "in_net_init_identity": false,
    "out_net": false,
    "out_net_init_identity": false,
    "freeze_ln": false,
    "freeze_pos": false,
    "freeze_wte": false,
    "dup_lm_head": false,
    "dup_lm_head_bias": false,
    "freeze_ff": false,
    "freeze_attn": false,
    "model_save_path": "official-matrix-results/unprejudiced_full"
  },
  "results": {
    "intrasentence": {
      "gender": {
        "Count": [765.0, 765.0, 765.0, 765.0, 765.0, 765.0, 765.0, 765.0, 765.0, 765.0],
        "LM Score": [93.36633422937771, 91.7658506549811, 91.22898454202802, 92.60683133291829, 93.59360695665043, 92.54365248278292, 92.62846340455037, 91.79493027101724, 92.65281867890563, 93.15924059837104],
        "SS Score": [61.748979683762286, 63.02738388825345, 61.830647291516854, 61.25860081947038, 60.53624909711865, 61.08708232186493, 62.20106576193532, 61.863980624850186, 60.2713399322095, 60.87978076673729],
        "ICAT Score": [71.42715094921135, 67.85647136868963, 69.64302576442849, 71.7543643902513, 73.87109582078871, 72.02287061392943, 70.02514393603161, 70.0138647871208, 73.61944675233744, 72.88819831625086]
      },
      "profession": {
        "Count": [2430.0, 2430.0, 2430.0, 2430.0, 2430.0, 2430.0, 2430.0, 2430.0, 2430.0, 2430.0],
        "LM Score": [92.66118513274773, 92.89712157285084, 93.11904559125475, 92.47266481564515, 92.63307771972653, 93.04684117530033, 93.46754217502432, 92.52513513017473, 92.03004251317432, 93.1777837549873],
        "SS Score": [62.42811341795197, 62.88681524499281, 64.36028780555368, 61.657931946889086, 61.184202874815426, 62.408674761343185, 62.23745612964913, 61.68922936855322, 61.88738643629987, 63.866052170013695],
        "ICAT Score": [69.62911076731505, 68.95416072283155, 66.37471969387688, 70.91186414827962, 71.9125350370072, 69.95508138100719, 70.59144323676452, 70.89418459231486, 70.15010893111015, 67.33762354232914]
      },
      "race": {
        "Count": [2886.0, 2886.0, 2886.0, 2886.0, 2886.0, 2886.0, 2886.0, 2886.0, 2886.0, 2886.0],
        "LM Score": [90.01315387609708, 89.95538806609045, 89.69226125140138, 89.57123039836956, 90.55240696045361, 89.83527794097476, 90.60476921789747, 89.46430372117965, 89.19705463444784, 89.99810499557233],
        "SS Score": [61.32607349049196, 61.21386666644415, 61.193511846868645, 59.70670331340266, 61.58789876404254, 60.846074325689365, 60.80182496222662, 60.362391660400576, 61.28258396894039, 60.40630213866781],
        "ICAT Score": [69.62324195786435, 69.78043351206286, 69.6128334736014, 72.18240322050141, 69.56616446649126, 70.34807590863927, 71.03083206120414, 70.92302062550172, 69.0695894605414, 71.26715554574284]
      },
      "religion": {
        "Count": [237.0, 237.0, 237.0, 237.0, 237.0, 237.0, 237.0, 237.0, 237.0, 237.0],
        "LM Score": [86.64367816091954, 86.73563218390804, 87.97701149425286, 86.06896551724138, 88.73563218390804, 86.64367816091954, 87.40229885057471, 85.40229885057471, 86.82758620689656, 87.97701149425286],
        "SS Score": [60.6896551724138, 56.87356321839081, 58.206896551724135, 54.57471264367816, 58.02298850574713, 61.05747126436781, 57.42528735632184, 54.39080459770114, 57.05747126436781, 54.39080459770114],
        "ICAT Score": [68.11985731272293, 74.81197516184436, 73.53664684898929, 78.1941498216409, 74.49713304267405, 67.48247853084952, 74.422555159202, 77.9026027216277, 74.57192231470472, 80.25121416303342]
      },
      "overall": {
        "Count": [2106.0, 2106.0, 2106.0, 2106.0, 2106.0, 2106.0, 2106.0, 2106.0, 2106.0, 2106.0],
        "LM Score": [91.31523380124634, 91.1794053248901, 91.12295762900163, 90.92429681888059, 91.65850567451082, 91.27649114486948, 91.82644921129453, 90.76740743202033, 90.62033148917844, 91.52897130324924],
        "SS Score": [61.77393431077383, 61.91390108708203, 62.363319281319285, 60.44923321284691, 61.16609997770158, 61.478001969327416, 61.39589749642977, 60.82955950471676, 61.223802899823404, 61.55162865256872],
        "ICAT Score": [69.81244251426976, 69.45335700049607, 68.59131324849211, 71.92251317538863, 71.18914491114452, 70.32305624258731, 70.897553157834, 71.10798663454165, 70.27823670215446, 70.38279755431415]
      }
    },
    "metadata": {
      "duration": {
        "duration": [26.866386890411377, 26.247880458831787, 26.342612504959106, 26.39614176750183, 27.073593854904175, 26.274484634399414, 26.274787664413452, 26.517155647277832, 26.309149503707886, 26.285419940948486]
      },
      "losses": {
        "validation_loss": [3.0885992306730023, 3.091949881159741, 3.0981860931800758, 3.0845772096644275, 3.08029606886532, 3.1037382374639098, 3.0817413928716078, 3.0916762030642966, 3.103582669341046, 3.107291618637417],
        "eval_loss": [3.0690226241298344, 3.0457953139491702, 3.058477856542753, 3.0494492716115453, 3.0524870473405588, 3.0668446246696557, 3.0485818563596063, 3.059026277972304, 3.073260214406511, 3.06602644674156]
      }
    }
  },
  "average": {
    "intrasentence": {
      "gender": {"Count": 765.0, "LM Score": 92.53407131515829, "SS Score": 61.47051101877188, "ICAT Score": 71.31216326990395},
      "profession": {"Count": 2430.0, "LM Score": 92.8030439580886, "SS Score": 62.46061501560621, "ICAT Score": 69.67108320528362},
      "race": {"Count": 2886.0, "LM Score": 89.88839510624841, "SS Score": 60.87272311371746, "ICAT Score": 70.34037502321506},
      "religion": {"Count": 237.0, "LM Score": 87.04137931034482, "SS Score": 57.268965517241384, "ICAT Score": 74.37905350772888},
      "overall": {"Count": 2106.0, "LM Score": 91.22200498291414, "SS Score": 61.41453783925898, "ICAT Score": 70.39584011412227}
    },
    "metadata": {
      "duration": {"duration": 26.458761286735534},
      "losses": {"validation_loss": 3.0931638604920844, "eval_loss": 3.0588971533723495}
    }
  },
  "std": {
    "intrasentence": {
      "gender": {"Count": 0.0, "LM Score": 0.7123380194683392, "SS Score": 0.7855804853526456, "ICAT Score": 1.8204195259923956},
      "profession": {"Count": 0.0, "LM Score": 0.3978768509623769, "SS Score": 0.9495454625348791, "ICAT Score": 1.6138026456827337},
      "race": {"Count": 0.0, "LM Score": 0.42259687676682406, "SS Score": 0.5429179154524575, "ICAT Score": 0.9294990644175601},
      "religion": {"Count": 0.0, "LM Score": 0.9401736767618437, "SS Score": 2.2705569190003607, "ICAT Score": 3.861903893320538},
      "overall": {"Count": 0.0, "LM Score": 0.36478754661787544, "SS Score": 0.5171072048352101, "ICAT Score": 0.9068171994588803}
    },
    "metadata": {
      "duration": {"duration": 0.2701305562792103},
      "losses": {"validation_loss": 0.009162376949856634, "eval_loss": 0.009118222223262331}
    }
  }
}
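Each models/*.json file pairs the fine-tuning configuration (the combination block, i.e. the exact keyword arguments that load_model.py forwards to get_model) with StereoSet-style intrasentence scores (LM Score, SS Score, ICAT Score per bias category) from ten evaluation runs, plus their mean and standard deviation. A short sketch (not part of this commit) of reading the summary for this checkpoint:

import json

# Read the stored configuration and the averaged overall scores.
with open('models/unprejudiced_full.json') as f:
    data = json.load(f)

print(data['combination']['model_save_path'])
overall = data['average']['intrasentence']['overall']
print(overall['LM Score'], overall['SS Score'], overall['ICAT Score'])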
models/unprejudiced_full.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ac8cba803b3ce66b1f1da95b130ffd19a1b2418729accf153becdda5a80a7b60
size 510403881
models/unprejudiced_ln.json
ADDED
@@ -0,0 +1,402 @@
{
  "combination": {
    "num_epoches": 6,
    "lr": 0.003,
    "batch_size": 50,
    "optimizer": "adam",
    "in_net": false,
    "in_net_init_identity": false,
    "out_net": false,
    "out_net_init_identity": false,
    "freeze_ln": false,
    "freeze_pos": true,
    "freeze_wte": true,
    "dup_lm_head": false,
    "dup_lm_head_bias": false,
    "freeze_ff": true,
    "freeze_attn": true,
    "model_save_path": "official-matrix-results/unprejudiced_ln"
  },
  "results": {
    "intrasentence": {
      "gender": {
        "Count": [765.0, 765.0, 765.0, 765.0, 765.0, 765.0, 765.0, 765.0, 765.0, 765.0],
        "LM Score": [93.3665623426493, 92.54642877251572, 91.95577313186008, 93.16290730203774, 93.00758798367494, 93.10249307858003, 92.15099087707782, 92.47608245216941, 92.31713270626315, 92.11677879286576],
        "SS Score": [58.38937407198277, 59.15761774457427, 59.946636133592655, 60.61585950281602, 62.78687399556965, 59.70037756124712, 61.83304376782638, 59.555265990048596, 60.040410475193084, 58.716202477072045],
        "ICAT Score": [77.70082199649761, 75.59633240603236, 73.66276081734392, 73.38282060619163, 69.22206182009276, 75.03990638346755, 70.34245671113719, 74.80341114120652, 73.77909458098806, 76.05860888298028]
      },
      "profession": {
        "Count": [2430.0, 2430.0, 2430.0, 2430.0, 2430.0, 2430.0, 2430.0, 2430.0, 2430.0, 2430.0],
        "LM Score": [93.42155681049479, 93.55387767183865, 93.42505531815998, 93.59371897393662, 93.59776400546822, 93.68882725290892, 93.57614366131821, 93.62406293304402, 93.87962527193969, 93.71598926431035],
        "SS Score": [60.77570766850481, 61.18993047556991, 60.96495959149169, 63.27688019928815, 60.133628838471765, 60.93779497884482, 61.674616442008755, 61.06836789643107, 61.11483773093695, 61.890074259366834],
        "ICAT Score": [73.28788908796467, 72.61664993448171, 72.93701619022998, 68.74106708948065, 74.62806399462248, 73.19384356689437, 71.72683195395423, 72.89875148301309, 73.01048924916414, 71.43018783145685]
      },
      "race": {
        "Count": [2886.0, 2886.0, 2886.0, 2886.0, 2886.0, 2886.0, 2886.0, 2886.0, 2886.0, 2886.0],
        "LM Score": [90.96165616577254, 91.35420776444788, 91.32908615666499, 91.41698192444424, 91.17563117040191, 91.65110416409732, 91.81221818459989, 91.66730060218228, 91.86267003440439, 91.47314873035648],
        "SS Score": [62.3910273260719, 62.51949918790423, 61.190879895946715, 62.2345679689098, 61.833629820428754, 61.563065847493526, 61.93683240101294, 61.59105021420435, 61.03402466000601, 61.00815812609043],
        "ICAT Score": [68.41948882227565, 68.48002916607508, 70.88802947294883, 69.04803634710001, 69.59685781211229, 70.45574911519842, 69.89327696790386, 70.41689491657309, 71.59037070453213, 71.33413102005345]
      },
      "religion": {
        "Count": [237.0, 237.0, 237.0, 237.0, 237.0, 237.0, 237.0, 237.0, 237.0, 237.0],
        "LM Score": [89.21839080459772, 87.88505747126437, 86.73563218390804, 89.88505747126437, 89.79310344827586, 88.45977011494251, 89.79310344827586, 88.55172413793105, 88.55172413793105, 88.55172413793105],
        "SS Score": [62.98850574712643, 58.02298850574713, 56.6896551724138, 59.17241379310345, 56.87356321839081, 60.6896551724138, 59.54022988505747, 57.839080459770116, 57.05747126436781, 56.87356321839081],
        "ICAT Score": [66.04211917029993, 73.78304135288677, 75.13100277447481, 73.39579865239794, 77.44913198573127, 69.5476813317479, 72.66016646848989, 74.66844233055888, 76.05269916765756, 76.3784066587396]
      },
      "overall": {
        "Count": [2106.0, 2106.0, 2106.0, 2106.0, 2106.0, 2106.0, 2106.0, 2106.0, 2106.0, 2106.0],
        "LM Score": [92.13401420408785, 92.20869961790169, 92.0299163171527, 92.40641979661093, 92.27482066430497, 92.48745327373923, 92.44827067849046, 92.39443932411862, 92.56039720092626, 92.29539231094425],
        "SS Score": [61.2937654652524, 61.41829149360298, 60.776655580405084, 62.3091985983148, 61.12036668734757, 61.05667039246612, 61.733108696584374, 60.99238918840615, 60.7879308637134, 60.895932085394335],
        "ICAT Score": [71.32321524822382, 71.1513834082361, 72.19442209228357, 69.65744033589628, 71.7522238283788, 72.03538754801231, 70.7541585048508, 72.08172660621284, 72.58969388649739, 72.18250578264676]
      }
    },
    "metadata": {
      "duration": {
        "duration": [57.25840997695923, 55.863980293273926, 54.31034064292908, 54.915284156799316, 53.84601712226868, 53.912495374679565, 53.90212535858154, 54.03045678138733, 54.102956771850586, 53.75367736816406]
      },
      "losses": {
        "validation_loss": [3.3091299150301063, 3.307041087357894, 3.3256353038808575, 3.3101960970007855, 3.2923448684422865, 3.298698979616165, 3.305526672238889, 3.3278808785521465, 3.308232697455779, 3.3029724375061367],
        "eval_loss": [3.312717050054799, 3.3115605367266614, 3.3303229648133983, 3.3192637523879176, 3.3043171120726544, 3.309308815261592, 3.3165352562199466, 3.3334239407725956, 3.3140551072099935, 3.3061055836470232]
      }
    }
  },
  "average": {
    "intrasentence": {
      "gender": {"Count": 765.0, "LM Score": 92.62027374396938, "SS Score": 60.07416617199226, "ICAT Score": 73.95882753459378},
      "profession": {"Count": 2430.0, "LM Score": 93.60766211634193, "SS Score": 61.302679808091476, "ICAT Score": 72.44707903812622},
      "race": {"Count": 2886.0, "LM Score": 91.47040048973719, "SS Score": 61.730273544806856, "ICAT Score": 70.01228643447728},
      "religion": {"Count": 237.0, "LM Score": 88.74252873563219, "SS Score": 58.57471264367816, "ICAT Score": 73.51084898929847},
      "overall": {"Count": 2106.0, "LM Score": 92.32398233882769, "SS Score": 61.23843090514872, "ICAT Score": 71.57221572412386}
    },
    "metadata": {
      "duration": {"duration": 54.58957438468933},
      "losses": {"validation_loss": 3.3087658937081046, "eval_loss": 3.3157610119166585}
    }
  },
  "std": {
    "intrasentence": {
      "gender": {"Count": 0.0, "LM Score": 0.4761700319667935, "SS Score": 1.2920872771127665, "ICAT Score": 2.4277955061750927},
      "profession": {"Count": 0.0, "LM Score": 0.12834114152345275, "SS Score": 0.7984554530030804, "ICAT Score": 1.4865696787919611},
      "race": {"Count": 0.0, "LM Score": 0.2691058396387355, "SS Score": 0.5207336044406384, "ICAT Score": 1.0661264047310255},
      "religion": {"Count": 0.0, "LM Score": 0.9322710675411378, "SS Score": 1.942494728669204, "ICAT Score": 3.262797755197479},
      "overall": {"Count": 0.0, "LM Score": 0.15776866900014336, "SS Score": 0.4547159623512881, "ICAT Score": 0.8292208633703583}
    },
    "metadata": {
      "duration": {"duration": 1.079948366638936},
      "losses": {"validation_loss": 0.010346238030998984, "eval_loss": 0.009124357291769709}
    }
  }
}
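The variant result files all follow the same schema, so the checkpoints added in this commit can be compared directly from their summaries. A sketch (not part of this commit; assumes all five result files are present under models/):

import json
import os

# Print each variant's averaged overall SS and ICAT scores side by side.
for name in ['unprejudiced_ln', 'unprejudiced_ln_wpe', 'unprejudiced_ln_wpe_wte',
             'unprejudiced_ln_wpe_wte_io', 'unprejudiced_full']:
    with open(os.path.join('models', f'{name}.json')) as f:
        overall = json.load(f)['average']['intrasentence']['overall']
    print(f"{name}: SS {overall['SS Score']:.2f}, ICAT {overall['ICAT Score']:.2f}")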
models/unprejudiced_ln.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a9212f40e0e4db4b474107965b989bccc1a0a8e59e54f2316b64db5a7c9a2096
size 510403881
models/unprejudiced_ln_wpe.json
ADDED
@@ -0,0 +1,402 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"combination": {
|
3 |
+
"num_epoches": 6,
|
4 |
+
"lr": 0.003,
|
5 |
+
"batch_size": 50,
|
6 |
+
"optimizer": "adam",
|
7 |
+
"in_net": false,
|
8 |
+
"in_net_init_identity": false,
|
9 |
+
"out_net": false,
|
10 |
+
"out_net_init_identity": false,
|
11 |
+
"freeze_ln": false,
|
12 |
+
"freeze_pos": false,
|
13 |
+
"freeze_wte": true,
|
14 |
+
"dup_lm_head": false,
|
15 |
+
"dup_lm_head_bias": false,
|
16 |
+
"freeze_ff": true,
|
17 |
+
"freeze_attn": true,
|
18 |
+
"model_save_path": "official-matrix-results/unprejudiced_ln_wpe"
|
19 |
+
},
|
20 |
+
"results": {
|
21 |
+
"intrasentence": {
|
22 |
+
"gender": {
|
23 |
+
"Count": [
|
24 |
+
765.0,
|
25 |
+
765.0,
|
26 |
+
765.0,
|
27 |
+
765.0,
|
28 |
+
765.0,
|
29 |
+
765.0,
|
30 |
+
765.0,
|
31 |
+
765.0,
|
32 |
+
765.0,
|
33 |
+
765.0
|
34 |
+
],
|
35 |
+
"LM Score": [
|
36 |
+
92.7176858568163,
|
37 |
+
93.08322900931596,
|
38 |
+
92.32134886047929,
|
39 |
+
92.62795891708936,
|
40 |
+
92.12632721545765,
|
41 |
+
92.80210144123187,
|
42 |
+
92.41691625604669,
|
43 |
+
92.60210144123188,
|
44 |
+
93.01657899266596,
|
45 |
+
92.42447110055807
|
46 |
+
],
|
47 |
+
"SS Score": [
|
48 |
+
58.558831667527315,
|
49 |
+
60.21846430542083,
|
50 |
+
58.20878654356915,
|
51 |
+
62.85139096008662,
|
52 |
+
58.70711784190045,
|
53 |
+
60.08912842391103,
|
54 |
+
60.473775982471636,
|
55 |
+
62.21483492353057,
|
56 |
+
61.9558024905851,
|
57 |
+
60.145115753811396
|
58 |
+
],
|
59 |
+
"ICAT Score": [
|
60 |
+
76.84658453979293,
|
61 |
+
74.0598759480158,
|
62 |
+
77.16442393627818,
|
63 |
+
68.81999663952223,
|
64 |
+
76.08323146732825,
|
65 |
+
74.07625505224374,
|
66 |
+
73.05783469891321,
|
67 |
+
69.9797137876983,
|
68 |
+
70.77482205694153,
|
69 |
+
73.67133194455891
|
70 |
+
]
|
71 |
+
},
|
72 |
+
"profession": {
|
73 |
+
"Count": [
|
74 |
+
2430.0,
|
75 |
+
2430.0,
|
76 |
+
2430.0,
|
77 |
+
2430.0,
|
78 |
+
2430.0,
|
79 |
+
2430.0,
|
80 |
+
2430.0,
|
81 |
+
2430.0,
|
82 |
+
2430.0,
|
83 |
+
2430.0
|
84 |
+
],
|
85 |
+
"LM Score": [
|
86 |
+
93.12784372674922,
|
87 |
+
94.06731011795789,
|
88 |
+
93.69368564200676,
|
89 |
+
93.85522925235688,
|
90 |
+
93.82916928497134,
|
91 |
+
93.48870087830163,
|
92 |
+
93.40373500784071,
|
93 |
+
94.15669883394328,
|
94 |
+
94.25990487137251,
|
95 |
+
93.77967183691065
|
96 |
+
],
|
97 |
+
"SS Score": [
|
98 |
+
62.508055021355624,
|
99 |
+
61.84743560382031,
|
100 |
+
61.21201259239156,
|
101 |
+
60.50192891774077,
|
102 |
+
61.25949015855352,
|
103 |
+
60.15307657034453,
|
104 |
+
59.480325304676036,
|
105 |
+
61.2593173087823,
|
106 |
+
61.81040767083471,
|
107 |
+
61.70338192489153
|
108 |
+
],
|
109 |
+
"ICAT Score": [
|
110 |
+
69.83087985966147,
|
111 |
+
71.77818213701588,
|
112 |
+
72.68378997709164,
|
113 |
+
74.14201032902655,
|
114 |
+
72.69979712198361,
|
115 |
+
74.50474210871297,
|
116 |
+
75.69377915691896,
|
117 |
+
72.95389585556688,
|
118 |
+
71.99494680047236,
|
119 |
+
71.82888551094346
|
120 |
+
]
|
121 |
+
},
|
122 |
+
"race": {
|
123 |
+
"Count": [
|
124 |
+
2886.0,
|
125 |
+
2886.0,
|
126 |
+
2886.0,
|
127 |
+
2886.0,
|
128 |
+
2886.0,
|
129 |
+
2886.0,
|
130 |
+
2886.0,
|
131 |
+
2886.0,
|
132 |
+
2886.0,
|
133 |
+
2886.0
|
134 |
+
],
|
135 |
+
"LM Score": [
|
136 |
+
91.0223231488533,
|
137 |
+
91.21319454686072,
|
138 |
+
91.39933271336287,
|
139 |
+
91.74224982225451,
|
140 |
+
90.88554271674474,
|
141 |
+
91.26214929907371,
|
142 |
+
91.26512135185419,
|
143 |
+
91.49484101667923,
|
144 |
+
91.64405301363871,
|
145 |
+
91.34223564879609
|
146 |
+
],
|
147 |
+
"SS Score": [
|
148 |
+
61.834064618059706,
|
149 |
+
60.82400200173773,
|
150 |
+
62.92807397744108,
|
151 |
+
60.861502629894645,
|
152 |
+
61.562841156193,
|
153 |
+
60.723637986731575,
|
154 |
+
60.68541428392551,
|
155 |
+
60.35250135410068,
|
156 |
+
62.66790705128793,
|
157 |
+
61.36633712121035
|
158 |
+
],
|
159 |
+
"ICAT Score": [
|
160 |
+
69.47904207226446,
|
161 |
+
71.46735853965845,
|
162 |
+
67.76698601722076,
|
163 |
+
71.81307606791714,
|
164 |
+
69.86764084018247,
|
165 |
+
71.6889042795874,
|
166 |
+
71.76100872550823,
|
167 |
+
72.55083170631127,
|
168 |
+
68.42528610603712,
|
169 |
+
70.577702773011
|
170 |
+
]
|
171 |
+
},
|
172 |
+
"religion": {
|
173 |
+
"Count": [
|
174 |
+
237.0,
|
175 |
+
237.0,
|
176 |
+
237.0,
|
177 |
+
237.0,
|
178 |
+
237.0,
|
179 |
+
237.0,
|
180 |
+
237.0,
|
181 |
+
237.0,
|
182 |
+
237.0,
|
183 |
+
237.0
|
184 |
+
],
|
185 |
+
"LM Score": [
|
186 |
+
87.97701149425286,
|
187 |
+
88.55172413793105,
|
188 |
+
88.55172413793105,
|
189 |
+
87.88505747126437,
|
190 |
+
87.97701149425286,
|
191 |
+
88.55172413793105,
|
192 |
+
89.88505747126437,
|
193 |
+
89.21839080459772,
|
194 |
+
87.97701149425286,
|
195 |
+
87.88505747126437
|
196 |
+
],
|
197 |
+
"SS Score": [
|
198 |
+
56.6896551724138,
|
199 |
+
60.50574712643678,
|
200 |
+
59.17241379310345,
|
201 |
+
58.02298850574713,
|
202 |
+
57.05747126436781,
|
203 |
+
55.54022988505747,
|
204 |
+
58.02298850574713,
|
205 |
+
60.50574712643678,
|
206 |
+
59.356321839080465,
|
207 |
+
54.206896551724135
|
208 |
+
],
|
209 |
+
"ICAT Score": [
|
210 |
+
76.20629409433212,
|
211 |
+
69.94568370986921,
|
212 |
+
72.30706302021403,
|
213 |
+
73.78304135288677,
|
214 |
+
75.55910688333994,
|
215 |
+
78.73978596908444,
|
216 |
+
75.46212181265689,
|
217 |
+
70.4722737481834,
|
218 |
+
71.51418681463863,
|
219 |
+
80.49059056678558
|
220 |
+
]
|
221 |
+
},
|
222 |
+
"overall": {
|
223 |
+
"Count": [
|
224 |
+
2106.0,
|
225 |
+
2106.0,
|
226 |
+
2106.0,
|
227 |
+
2106.0,
|
228 |
+
2106.0,
|
229 |
+
2106.0,
|
230 |
+
2106.0,
|
231 |
+
2106.0,
|
232 |
+
2106.0,
|
233 |
+
2106.0
|
234 |
+
],
|
235 |
+
"LM Score": [
|
236 |
+
91.92084605331794,
|
237 |
+
92.43268062952754,
|
238 |
+
92.27917984759306,
|
239 |
+
92.51028649058932,
|
240 |
+
92.04998636695296,
|
241 |
+
92.19967832837742,
|
242 |
+
92.17064245412953,
|
243 |
+
92.55938517018812,
|
244 |
+
92.67189720305808,
|
245 |
+
92.27355090991587
|
246 |
+
],
|
247 |
+
"SS Score": [
|
248 |
+
61.4800665706751,
|
249 |
+
61.12391170393275,
|
250 |
+
61.53640693385606,
|
251 |
+
60.869048573753645,
|
252 |
+
60.915070615084325,
|
253 |
+
60.22981314591084,
|
254 |
+
60.09989112282983,
|
255 |
+
60.93841972943936,
|
256 |
+
62.12637815689239,
|
257 |
+
61.067866345615265
|
258 |
+
],
|
259 |
+
"ICAT Score": [
|
260 |
+
70.81569741482059,
|
261 |
+
71.86842107191396,
|
262 |
+
70.9877764427066,
|
263 |
+
72.40031054182771,
|
264 |
+
71.95534434069614,
|
265 |
+
73.3359687001297,
|
266 |
+
73.55237338396984,
|
267 |
+
72.31031707238087,
|
268 |
+
70.19640780303926,
|
269 |
+
71.84812433579037
|
270 |
+
]
|
271 |
+
}
|
272 |
+
},
|
273 |
+
"metadata": {
|
274 |
+
"duration": {
|
275 |
+
"duration": [
|
276 |
+
53.742661237716675,
|
277 |
+
56.175448417663574,
|
278 |
+
54.007455348968506,
|
279 |
+
53.96895956993103,
|
280 |
+
53.936455965042114,
|
281 |
+
53.89695692062378,
|
282 |
+
53.804457902908325,
|
283 |
+
53.90851712226868,
|
284 |
+
53.96249437332153,
|
285 |
+
53.888397455215454
|
286 |
+
]
|
287 |
+
},
|
288 |
+
"losses": {
|
289 |
+
"validation_loss": [
|
290 |
+
3.3056258696576823,
|
291 |
+
3.301140990982885,
|
292 |
+
3.3110471544058426,
|
293 |
+
3.306687390804291,
|
294 |
+
3.31727870080782,
|
295 |
+
3.2936707496643067,
|
296 |
+
3.32195488292238,
|
297 |
+
3.3019997322041057,
|
298 |
+
3.2873675157194553,
|
299 |
+
3.313567125538121
|
300 |
+
],
|
301 |
+
"eval_loss": [
|
302 |
+
3.3179407469604327,
|
303 |
+
3.3158180734385616,
|
304 |
+
3.320703720268996,
|
305 |
+
3.311780313823534,
|
306 |
+
3.3149880080119423,
|
307 |
+
3.3110294715217923,
|
308 |
+
3.323618240200955,
|
309 |
+
3.318548839506896,
|
310 |
+
3.3115098657815354,
|
311 |
+
3.3291698354741803
|
312 |
+
]
|
313 |
+
}
|
314 |
+
}
|
315 |
+
},
|
316 |
+
"average": {
|
317 |
+
"intrasentence": {
|
318 |
+
"gender": {
|
319 |
+
"Count": 765.0,
|
320 |
+
"LM Score": 92.6138719090893,
|
321 |
+
"SS Score": 60.342324889281414,
|
322 |
+
"ICAT Score": 73.4534070071293
|
323 |
+
},
|
324 |
+
"profession": {
|
325 |
+
"Count": 2430.0,
|
326 |
+
"LM Score": 93.7661949452411,
|
327 |
+
"SS Score": 61.17354310733909,
|
328 |
+
"ICAT Score": 72.81109088573938
|
329 |
+
},
|
330 |
+
"race": {
|
331 |
+
"Count": 2886.0,
|
332 |
+
"LM Score": 91.3271043278118,
|
333 |
+
"SS Score": 61.38062821805822,
|
334 |
+
"ICAT Score": 70.53978371276983
|
335 |
+
},
|
336 |
+
"religion": {
|
337 |
+
"Count": 237.0,
|
338 |
+
"LM Score": 88.44597701149426,
|
339 |
+
"SS Score": 57.9080459770115,
|
340 |
+
"ICAT Score": 74.44801479719911
|
341 |
+
},
|
342 |
+
"overall": {
|
343 |
+
"Count": 2106.0,
|
344 |
+
"LM Score": 92.30681334536499,
|
345 |
+
"SS Score": 61.03868728979895,
|
346 |
+
"ICAT Score": 71.92707411072752
|
347 |
+
}
|
348 |
+
},
|
349 |
+
"metadata": {
|
350 |
+
"duration": {
|
351 |
+
"duration": 54.12918043136597
|
352 |
+
},
|
353 |
+
"losses": {
|
354 |
+
"validation_loss": 3.3060340112706896,
|
355 |
+
"eval_loss": 3.3175107114988824
|
356 |
+
}
|
357 |
+
}
|
358 |
+
},
|
359 |
+
"std": {
|
360 |
+
"intrasentence": {
|
361 |
+
"gender": {
|
362 |
+
"Count": 0.0,
|
363 |
+
"LM Score": 0.28756695232787854,
|
364 |
+
"SS Score": 1.5148390378431047,
|
365 |
+
"ICAT Score": 2.7179191241416563
|
366 |
+
},
|
367 |
+
"profession": {
|
368 |
+
"Count": 0.0,
|
369 |
+
"LM Score": 0.3346815901752131,
|
370 |
+
"SS Score": 0.8542251986955223,
|
371 |
+
"ICAT Score": 1.5677389639330035
|
372 |
+
},
|
373 |
+
"race": {
|
374 |
+
"Count": 0.0,
|
375 |
+
"LM Score": 0.24805008587550623,
|
376 |
+
"SS Score": 0.8271646196178791,
|
377 |
+
"ICAT Score": 1.5190702760512533
|
378 |
+
},
|
379 |
+
"religion": {
|
380 |
+
"Count": 0.0,
|
381 |
+
"LM Score": 0.6316779584586937,
|
382 |
+
"SS Score": 1.9680362966275398,
|
383 |
+
"ICAT Score": 3.31658287105069
|
384 |
+
},
|
385 |
+
"overall": {
|
386 |
+
"Count": 0.0,
|
387 |
+
"LM Score": 0.22386243868920438,
|
388 |
+
"SS Score": 0.5677547126853678,
|
389 |
+
"ICAT Score": 1.006008258851537
|
390 |
+
}
|
391 |
+
},
|
392 |
+
"metadata": {
|
393 |
+
"duration": {
|
394 |
+
"duration": 0.6861534564374508
|
395 |
+
},
|
396 |
+
"losses": {
|
397 |
+
"validation_loss": 0.010019458292857046,
|
398 |
+
"eval_loss": 0.005520671688763776
|
399 |
+
}
|
400 |
+
}
|
401 |
+
}
|
402 |
+
}
|
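The per-category metrics recorded in these results files follow the StereoSet convention: a language-modeling score ("LM Score"), a stereotype score ("SS Score"), and a combined "ICAT Score". The averages above are consistent with the idealized CAT formula from the StereoSet paper, icat = lm * min(ss, 100 - ss) / 50. A minimal sketch of that relationship (the file name unprejudiced_ln_wpe.json and the key names are taken from the diff above):

```python
import json

def icat(lm_score: float, ss_score: float) -> float:
    """Idealized CAT score: rewards a high LM score and an SS score close to 50."""
    return lm_score * min(ss_score, 100.0 - ss_score) / 50.0

# Check the overall averages recorded in the results file above.
with open("models/unprejudiced_ln_wpe.json") as f:
    report = json.load(f)

overall = report["average"]["intrasentence"]["overall"]
print(icat(overall["LM Score"], overall["SS Score"]))  # ~71.93
print(overall["ICAT Score"])                           # 71.92707411072752
```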
models/unprejudiced_ln_wpe.pth
ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:838da1fcf78aafbe0875b4132609aa06ef1ac1f8b7d90e7063da4c9faf08d977
+ size 510403881
models/unprejudiced_ln_wpe_wte.json
ADDED
@@ -0,0 +1,402 @@
1 |
+
{
|
2 |
+
"combination": {
|
3 |
+
"num_epoches": 2,
|
4 |
+
"lr": 0.0005,
|
5 |
+
"batch_size": 50,
|
6 |
+
"optimizer": "adam",
|
7 |
+
"in_net": false,
|
8 |
+
"in_net_init_identity": false,
|
9 |
+
"out_net": false,
|
10 |
+
"out_net_init_identity": false,
|
11 |
+
"freeze_ln": false,
|
12 |
+
"freeze_pos": false,
|
13 |
+
"freeze_wte": false,
|
14 |
+
"dup_lm_head": false,
|
15 |
+
"dup_lm_head_bias": false,
|
16 |
+
"freeze_ff": true,
|
17 |
+
"freeze_attn": true,
|
18 |
+
"model_save_path": "official-matrix-results/unprejudiced_ln_wpe_wte"
|
19 |
+
},
|
20 |
+
"results": {
|
21 |
+
"intrasentence": {
|
22 |
+
"gender": {
|
23 |
+
"Count": [
|
24 |
+
765.0,
|
25 |
+
765.0,
|
26 |
+
765.0,
|
27 |
+
765.0,
|
28 |
+
765.0,
|
29 |
+
765.0,
|
30 |
+
765.0,
|
31 |
+
765.0,
|
32 |
+
765.0,
|
33 |
+
765.0
|
34 |
+
],
|
35 |
+
"LM Score": [
|
36 |
+
92.43129873347264,
|
37 |
+
91.43259373476765,
|
38 |
+
92.13629743847136,
|
39 |
+
91.20926151143541,
|
40 |
+
91.21010326227717,
|
41 |
+
90.91594371811763,
|
42 |
+
92.16193846411238,
|
43 |
+
90.7934365956105,
|
44 |
+
91.49714029931422,
|
45 |
+
92.1860809882549
|
46 |
+
],
|
47 |
+
"SS Score": [
|
48 |
+
66.29504215591172,
|
49 |
+
64.59507545594502,
|
50 |
+
64.75800511887468,
|
51 |
+
65.29829815916771,
|
52 |
+
65.54005640092598,
|
53 |
+
63.945917206786774,
|
54 |
+
64.58617694704652,
|
55 |
+
63.05985254680907,
|
56 |
+
64.32537108624065,
|
57 |
+
64.70672306759263
|
58 |
+
],
|
59 |
+
"ICAT Score": [
|
60 |
+
62.30786054572053,
|
61 |
+
64.74328164093365,
|
62 |
+
64.94133845384896,
|
63 |
+
63.302331961846626,
|
64 |
+
62.86190028167578,
|
65 |
+
65.55781924072254,
|
66 |
+
65.27613161970525,
|
67 |
+
67.07845871247586,
|
68 |
+
65.28253053696422,
|
69 |
+
65.07097771263628
|
70 |
+
]
|
71 |
+
},
|
72 |
+
"profession": {
|
73 |
+
"Count": [
|
74 |
+
2430.0,
|
75 |
+
2430.0,
|
76 |
+
2430.0,
|
77 |
+
2430.0,
|
78 |
+
2430.0,
|
79 |
+
2430.0,
|
80 |
+
2430.0,
|
81 |
+
2430.0,
|
82 |
+
2430.0,
|
83 |
+
2430.0
|
84 |
+
],
|
85 |
+
"LM Score": [
|
86 |
+
91.50663838517907,
|
87 |
+
91.3841405897646,
|
88 |
+
91.54163846119926,
|
89 |
+
92.10871917475984,
|
90 |
+
91.7002340641914,
|
91 |
+
91.72268326516071,
|
92 |
+
91.71685663914732,
|
93 |
+
92.07914694037439,
|
94 |
+
91.62663625661372,
|
95 |
+
91.73828749118161
|
96 |
+
],
|
97 |
+
"SS Score": [
|
98 |
+
60.91353813513963,
|
99 |
+
61.44278715023322,
|
100 |
+
61.13515981056566,
|
101 |
+
61.638794206229036,
|
102 |
+
60.299569945684794,
|
103 |
+
61.1775280082693,
|
104 |
+
60.962123282431676,
|
105 |
+
61.153088803370316,
|
106 |
+
60.77432768084297,
|
107 |
+
61.70749997537894
|
108 |
+
],
|
109 |
+
"ICAT Score": [
|
110 |
+
71.53341463247739,
|
111 |
+
70.4703551962513,
|
112 |
+
71.1550229894697,
|
113 |
+
70.6680306332724,
|
114 |
+
72.81077456859526,
|
115 |
+
71.21802604136175,
|
116 |
+
71.60862684803843,
|
117 |
+
71.53980888508279,
|
118 |
+
71.88232819017045,
|
119 |
+
70.25776752029532
|
120 |
+
]
|
121 |
+
},
|
122 |
+
"race": {
|
123 |
+
"Count": [
|
124 |
+
2886.0,
|
125 |
+
2886.0,
|
126 |
+
2886.0,
|
127 |
+
2886.0,
|
128 |
+
2886.0,
|
129 |
+
2886.0,
|
130 |
+
2886.0,
|
131 |
+
2886.0,
|
132 |
+
2886.0,
|
133 |
+
2886.0
|
134 |
+
],
|
135 |
+
"LM Score": [
|
136 |
+
89.39366288698176,
|
137 |
+
88.91691870810712,
|
138 |
+
88.8148278711025,
|
139 |
+
88.65197683325145,
|
140 |
+
88.94004894408926,
|
141 |
+
88.98160530751596,
|
142 |
+
88.98987712828776,
|
143 |
+
88.74682356747407,
|
144 |
+
88.93669443046905,
|
145 |
+
88.60432344793122
|
146 |
+
],
|
147 |
+
"SS Score": [
|
148 |
+
60.59579079694762,
|
149 |
+
60.39917866627228,
|
150 |
+
59.61793655894321,
|
151 |
+
60.443527899767524,
|
152 |
+
59.37404465377777,
|
153 |
+
60.16753875892854,
|
154 |
+
59.72543458213392,
|
155 |
+
59.91491727646836,
|
156 |
+
60.339473040629855,
|
157 |
+
59.782459993998046
|
158 |
+
],
|
159 |
+
"ICAT Score": [
|
160 |
+
70.44973187651537,
|
161 |
+
70.42366022610683,
|
162 |
+
71.730520271948,
|
163 |
+
70.13518896469934,
|
164 |
+
72.26548913786779,
|
165 |
+
70.88712689159895,
|
166 |
+
71.68057255862179,
|
167 |
+
71.14847528305731,
|
168 |
+
70.54552334273765,
|
169 |
+
71.26895845943822
|
170 |
+
]
|
171 |
+
},
|
172 |
+
"religion": {
|
173 |
+
"Count": [
|
174 |
+
237.0,
|
175 |
+
237.0,
|
176 |
+
237.0,
|
177 |
+
237.0,
|
178 |
+
237.0,
|
179 |
+
237.0,
|
180 |
+
237.0,
|
181 |
+
237.0,
|
182 |
+
237.0,
|
183 |
+
237.0
|
184 |
+
],
|
185 |
+
"LM Score": [
|
186 |
+
86.16091954022988,
|
187 |
+
84.16091954022988,
|
188 |
+
86.06896551724138,
|
189 |
+
85.49425287356321,
|
190 |
+
85.49425287356321,
|
191 |
+
85.49425287356321,
|
192 |
+
85.40229885057471,
|
193 |
+
85.49425287356321,
|
194 |
+
85.49425287356321,
|
195 |
+
86.16091954022988
|
196 |
+
],
|
197 |
+
"SS Score": [
|
198 |
+
56.50574712643678,
|
199 |
+
56.50574712643678,
|
200 |
+
54.02298850574713,
|
201 |
+
56.6896551724138,
|
202 |
+
55.356321839080465,
|
203 |
+
55.17241379310345,
|
204 |
+
55.356321839080465,
|
205 |
+
55.17241379310345,
|
206 |
+
57.839080459770116,
|
207 |
+
57.839080459770116
|
208 |
+
],
|
209 |
+
"ICAT Score": [
|
210 |
+
74.95009644602986,
|
211 |
+
73.21032633108733,
|
212 |
+
79.14387633769323,
|
213 |
+
74.05571145461751,
|
214 |
+
76.33555819791253,
|
215 |
+
76.65001981767736,
|
216 |
+
76.25345488175452,
|
217 |
+
76.65001981767736,
|
218 |
+
72.09032633108733,
|
219 |
+
72.65247192495706
|
220 |
+
]
|
221 |
+
},
|
222 |
+
"overall": {
|
223 |
+
"Count": [
|
224 |
+
2106.0,
|
225 |
+
2106.0,
|
226 |
+
2106.0,
|
227 |
+
2106.0,
|
228 |
+
2106.0,
|
229 |
+
2106.0,
|
230 |
+
2106.0,
|
231 |
+
2106.0,
|
232 |
+
2106.0,
|
233 |
+
2106.0
|
234 |
+
],
|
235 |
+
"LM Score": [
|
236 |
+
90.4578071068624,
|
237 |
+
89.99167072345773,
|
238 |
+
90.1664914953431,
|
239 |
+
90.16845715158091,
|
240 |
+
90.14471614122048,
|
241 |
+
90.13494284591468,
|
242 |
+
90.29072856943831,
|
243 |
+
90.14781242046953,
|
244 |
+
90.15157276973581,
|
245 |
+
90.15503591600266
|
246 |
+
],
|
247 |
+
"SS Score": [
|
248 |
+
61.28256045168015,
|
249 |
+
61.178760030779216,
|
250 |
+
60.632272495505234,
|
251 |
+
61.36940224920786,
|
252 |
+
60.3534460181398,
|
253 |
+
60.839666443249165,
|
254 |
+
60.64443137237314,
|
255 |
+
60.60310699875154,
|
256 |
+
60.914200153540264,
|
257 |
+
61.0630130519055
|
258 |
+
],
|
259 |
+
"ICAT Score": [
|
260 |
+
70.04589356667046,
|
261 |
+
69.87176488772906,
|
262 |
+
70.99299734450024,
|
263 |
+
69.66522796064518,
|
264 |
+
71.47854709344725,
|
265 |
+
70.59428853929383,
|
266 |
+
71.06885929305918,
|
267 |
+
71.03087440451709,
|
268 |
+
70.47292658242887,
|
269 |
+
70.20730913532773
|
270 |
+
]
|
271 |
+
}
|
272 |
+
},
|
273 |
+
"metadata": {
|
274 |
+
"duration": {
|
275 |
+
"duration": [
|
276 |
+
21.941982746124268,
|
277 |
+
21.944481134414673,
|
278 |
+
21.99310827255249,
|
279 |
+
21.98748207092285,
|
280 |
+
21.975950717926025,
|
281 |
+
21.933486700057983,
|
282 |
+
21.899983167648315,
|
283 |
+
21.990981340408325,
|
284 |
+
21.964474201202393,
|
285 |
+
21.93599247932434
|
286 |
+
]
|
287 |
+
},
|
288 |
+
"losses": {
|
289 |
+
"validation_loss": [
|
290 |
+
3.3632682040981625,
|
291 |
+
3.3703565641589788,
|
292 |
+
3.361939956571745,
|
293 |
+
3.353457013938738,
|
294 |
+
3.3672675721023393,
|
295 |
+
3.3617931410022406,
|
296 |
+
3.352397534639939,
|
297 |
+
3.3747271255306575,
|
298 |
+
3.3604050234608027,
|
299 |
+
3.3657349874144016
|
300 |
+
],
|
301 |
+
"eval_loss": [
|
302 |
+
3.3331255057583684,
|
303 |
+
3.3345826001270957,
|
304 |
+
3.331943160554637,
|
305 |
+
3.3228615221769915,
|
306 |
+
3.3395521049914154,
|
307 |
+
3.3333527875983195,
|
308 |
+
3.322227156680563,
|
309 |
+
3.3383923292160036,
|
310 |
+
3.3276192856871565,
|
311 |
+
3.3312423260315605
|
312 |
+
]
|
313 |
+
}
|
314 |
+
}
|
315 |
+
},
|
316 |
+
"average": {
|
317 |
+
"intrasentence": {
|
318 |
+
"gender": {
|
319 |
+
"Count": 765.0,
|
320 |
+
"LM Score": 91.59740947458337,
|
321 |
+
"SS Score": 64.71105181453007,
|
322 |
+
"ICAT Score": 64.64226307065297
|
323 |
+
},
|
324 |
+
"profession": {
|
325 |
+
"Count": 2430.0,
|
326 |
+
"LM Score": 91.7124981267572,
|
327 |
+
"SS Score": 61.120441699814556,
|
328 |
+
"ICAT Score": 71.31441555050148
|
329 |
+
},
|
330 |
+
"race": {
|
331 |
+
"Count": 2886.0,
|
332 |
+
"LM Score": 88.89767591252101,
|
333 |
+
"SS Score": 60.036030222786714,
|
334 |
+
"ICAT Score": 71.05352470125911
|
335 |
+
},
|
336 |
+
"religion": {
|
337 |
+
"Count": 237.0,
|
338 |
+
"LM Score": 85.54252873563217,
|
339 |
+
"SS Score": 56.04597701149426,
|
340 |
+
"ICAT Score": 75.1991861540494
|
341 |
+
},
|
342 |
+
"overall": {
|
343 |
+
"Count": 2106.0,
|
344 |
+
"LM Score": 90.18092351400256,
|
345 |
+
"SS Score": 60.88808592651319,
|
346 |
+
"ICAT Score": 70.54286888076189
|
347 |
+
}
|
348 |
+
},
|
349 |
+
"metadata": {
|
350 |
+
"duration": {
|
351 |
+
"duration": 21.956792283058167
|
352 |
+
},
|
353 |
+
"losses": {
|
354 |
+
"validation_loss": 3.3631347122918003,
|
355 |
+
"eval_loss": 3.3314898778822113
|
356 |
+
}
|
357 |
+
}
|
358 |
+
},
|
359 |
+
"std": {
|
360 |
+
"intrasentence": {
|
361 |
+
"gender": {
|
362 |
+
"Count": 0.0,
|
363 |
+
"LM Score": 0.5567449785213698,
|
364 |
+
"SS Score": 0.8386321892300217,
|
365 |
+
"ICAT Score": 1.352146934968328
|
366 |
+
},
|
367 |
+
"profession": {
|
368 |
+
"Count": 0.0,
|
369 |
+
"LM Score": 0.2188887605591837,
|
370 |
+
"SS Score": 0.39790373071920393,
|
371 |
+
"ICAT Score": 0.7091115177929306
|
372 |
+
},
|
373 |
+
"race": {
|
374 |
+
"Count": 0.0,
|
375 |
+
"LM Score": 0.2100667570332781,
|
376 |
+
"SS Score": 0.3884711253005801,
|
377 |
+
"ICAT Score": 0.6531604932587152
|
378 |
+
},
|
379 |
+
"religion": {
|
380 |
+
"Count": 0.0,
|
381 |
+
"LM Score": 0.5460871032380242,
|
382 |
+
"SS Score": 1.1763288250136794,
|
383 |
+
"ICAT Score": 2.0892378391658264
|
384 |
+
},
|
385 |
+
"overall": {
|
386 |
+
"Count": 0.0,
|
387 |
+
"LM Score": 0.11443564147310176,
|
388 |
+
"SS Score": 0.3157932489668835,
|
389 |
+
"ICAT Score": 0.5639751344417095
|
390 |
+
}
|
391 |
+
},
|
392 |
+
"metadata": {
|
393 |
+
"duration": {
|
394 |
+
"duration": 0.029033786698770114
|
395 |
+
},
|
396 |
+
"losses": {
|
397 |
+
"validation_loss": 0.006557608217110893,
|
398 |
+
"eval_loss": 0.005512247633179753
|
399 |
+
}
|
400 |
+
}
|
401 |
+
}
|
402 |
+
}
|
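Each results file records ten repeated evaluation runs under "results" and summarizes them under "average" and "std". The stored summaries are consistent with a plain mean and a population standard deviation over the ten runs. A small sketch reproducing one entry (the file name unprejudiced_ln_wpe_wte.json and the key names are taken from the diff above):

```python
import json
from statistics import fmean, pstdev

with open("models/unprejudiced_ln_wpe_wte.json") as f:
    report = json.load(f)

runs = report["results"]["intrasentence"]["gender"]["LM Score"]  # ten runs
print(fmean(runs))   # ~91.597, matches report["average"]["intrasentence"]["gender"]["LM Score"]
print(pstdev(runs))  # ~0.557,  matches report["std"]["intrasentence"]["gender"]["LM Score"]
```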
models/unprejudiced_ln_wpe_wte.pth
ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:13a7e6a687c028b944c9599154887b4c1fae325f6379cf6ffa0194d3933700c4
+ size 510403881
models/unprejudiced_ln_wpe_wte_io.json
ADDED
@@ -0,0 +1,402 @@
1 |
+
{
|
2 |
+
"combination": {
|
3 |
+
"num_epoches": 2,
|
4 |
+
"lr": 0.0006,
|
5 |
+
"batch_size": 50,
|
6 |
+
"optimizer": "adam",
|
7 |
+
"in_net": true,
|
8 |
+
"in_net_init_identity": true,
|
9 |
+
"out_net": true,
|
10 |
+
"out_net_init_identity": true,
|
11 |
+
"freeze_ln": false,
|
12 |
+
"freeze_pos": false,
|
13 |
+
"freeze_wte": false,
|
14 |
+
"dup_lm_head": false,
|
15 |
+
"dup_lm_head_bias": false,
|
16 |
+
"freeze_ff": true,
|
17 |
+
"freeze_attn": true,
|
18 |
+
"model_save_path": "official-matrix-results/unprejudiced_ln_wpe_wte_io"
|
19 |
+
},
|
20 |
+
"results": {
|
21 |
+
"intrasentence": {
|
22 |
+
"gender": {
|
23 |
+
"Count": [
|
24 |
+
765.0,
|
25 |
+
765.0,
|
26 |
+
765.0,
|
27 |
+
765.0,
|
28 |
+
765.0,
|
29 |
+
765.0,
|
30 |
+
765.0,
|
31 |
+
765.0,
|
32 |
+
765.0,
|
33 |
+
765.0
|
34 |
+
],
|
35 |
+
"LM Score": [
|
36 |
+
90.55810027331766,
|
37 |
+
91.40096111617851,
|
38 |
+
91.13614260135999,
|
39 |
+
91.07478729000466,
|
40 |
+
91.31158752680491,
|
41 |
+
91.34805106326846,
|
42 |
+
91.53796984014376,
|
43 |
+
91.0110547262721,
|
44 |
+
90.73799445321184,
|
45 |
+
90.68267939789679
|
46 |
+
],
|
47 |
+
"SS Score": [
|
48 |
+
66.81065359761013,
|
49 |
+
66.33071019157975,
|
50 |
+
63.40482609178262,
|
51 |
+
66.13163149250107,
|
52 |
+
66.25976262063219,
|
53 |
+
65.81805360501012,
|
54 |
+
67.2708256316952,
|
55 |
+
65.41252724730985,
|
56 |
+
66.68731654818612,
|
57 |
+
66.6890899760465
|
58 |
+
],
|
59 |
+
"ICAT Score": [
|
60 |
+
60.111283190269944,
|
61 |
+
61.54810897177528,
|
62 |
+
66.70285975641735,
|
63 |
+
61.69108915359916,
|
64 |
+
61.617492772826374,
|
65 |
+
62.44908369462882,
|
66 |
+
59.91924352437381,
|
67 |
+
62.956847510770565,
|
68 |
+
60.4545217254458,
|
69 |
+
60.414451483087234
|
70 |
+
]
|
71 |
+
},
|
72 |
+
"profession": {
|
73 |
+
"Count": [
|
74 |
+
2430.0,
|
75 |
+
2430.0,
|
76 |
+
2430.0,
|
77 |
+
2430.0,
|
78 |
+
2430.0,
|
79 |
+
2430.0,
|
80 |
+
2430.0,
|
81 |
+
2430.0,
|
82 |
+
2430.0,
|
83 |
+
2430.0
|
84 |
+
],
|
85 |
+
"LM Score": [
|
86 |
+
92.29883988595721,
|
87 |
+
92.03481252336664,
|
88 |
+
92.32753282988004,
|
89 |
+
92.51230370502448,
|
90 |
+
92.27759250572994,
|
91 |
+
91.99133809655886,
|
92 |
+
91.58335553792976,
|
93 |
+
92.05163318939707,
|
94 |
+
92.52764533976269,
|
95 |
+
91.85850111659546
|
96 |
+
],
|
97 |
+
"SS Score": [
|
98 |
+
60.63636484894082,
|
99 |
+
61.431217387879386,
|
100 |
+
60.826831671767664,
|
101 |
+
60.633294621677045,
|
102 |
+
60.97102028469007,
|
103 |
+
60.177554097490095,
|
104 |
+
60.69776406386327,
|
105 |
+
60.510909237111456,
|
106 |
+
59.97351667646621,
|
107 |
+
61.01518572937461
|
108 |
+
],
|
109 |
+
"ICAT Score": [
|
110 |
+
72.66435716273696,
|
111 |
+
70.99341353922009,
|
112 |
+
72.33523969750577,
|
113 |
+
72.83809207651267,
|
114 |
+
72.0300057216754,
|
115 |
+
73.26640169699428,
|
116 |
+
71.98861294349621,
|
117 |
+
72.70070595776448,
|
118 |
+
74.0711250631572,
|
119 |
+
71.62173210417018
|
120 |
+
]
|
121 |
+
},
|
122 |
+
"race": {
|
123 |
+
"Count": [
|
124 |
+
2886.0,
|
125 |
+
2886.0,
|
126 |
+
2886.0,
|
127 |
+
2886.0,
|
128 |
+
2886.0,
|
129 |
+
2886.0,
|
130 |
+
2886.0,
|
131 |
+
2886.0,
|
132 |
+
2886.0,
|
133 |
+
2886.0
|
134 |
+
],
|
135 |
+
"LM Score": [
|
136 |
+
89.78556328444421,
|
137 |
+
89.59882439437756,
|
138 |
+
90.62817158489887,
|
139 |
+
89.78041732578544,
|
140 |
+
89.4019756123261,
|
141 |
+
89.8760984753066,
|
142 |
+
89.45344922768207,
|
143 |
+
89.64170601992049,
|
144 |
+
89.50337752742534,
|
145 |
+
89.5530376270789
|
146 |
+
],
|
147 |
+
"SS Score": [
|
148 |
+
58.98249457493492,
|
149 |
+
58.99206829233281,
|
150 |
+
60.880550541384764,
|
151 |
+
60.639728383061005,
|
152 |
+
59.75070862227082,
|
153 |
+
61.331862353884276,
|
154 |
+
60.100876368109006,
|
155 |
+
59.682369145096786,
|
156 |
+
59.95173601499203,
|
157 |
+
60.664880403654834
|
158 |
+
],
|
159 |
+
"ICAT Score": [
|
160 |
+
73.65559658224429,
|
161 |
+
73.485249437038,
|
162 |
+
70.90648355684321,
|
163 |
+
70.67563223650102,
|
164 |
+
71.96732332330302,
|
165 |
+
69.50682693878015,
|
166 |
+
71.38228460068741,
|
167 |
+
72.28282425029819,
|
168 |
+
71.6890978153632,
|
169 |
+
70.45158890554293
|
170 |
+
]
|
171 |
+
},
|
172 |
+
"religion": {
|
173 |
+
"Count": [
|
174 |
+
237.0,
|
175 |
+
237.0,
|
176 |
+
237.0,
|
177 |
+
237.0,
|
178 |
+
237.0,
|
179 |
+
237.0,
|
180 |
+
237.0,
|
181 |
+
237.0,
|
182 |
+
237.0,
|
183 |
+
237.0
|
184 |
+
],
|
185 |
+
"LM Score": [
|
186 |
+
88.16091954022988,
|
187 |
+
91.3103448275862,
|
188 |
+
88.82758620689656,
|
189 |
+
88.25287356321839,
|
190 |
+
86.91954022988506,
|
191 |
+
88.06896551724138,
|
192 |
+
88.73563218390804,
|
193 |
+
90.64367816091954,
|
194 |
+
88.82758620689656,
|
195 |
+
90.73563218390804
|
196 |
+
],
|
197 |
+
"SS Score": [
|
198 |
+
56.50574712643678,
|
199 |
+
52.6896551724138,
|
200 |
+
54.02298850574713,
|
201 |
+
56.50574712643678,
|
202 |
+
55.17241379310345,
|
203 |
+
57.839080459770116,
|
204 |
+
52.50574712643678,
|
205 |
+
55.17241379310345,
|
206 |
+
56.50574712643678,
|
207 |
+
51.54022988505747
|
208 |
+
],
|
209 |
+
"ICAT Score": [
|
210 |
+
76.68986656097239,
|
211 |
+
86.3984780023781,
|
212 |
+
81.68053904082441,
|
213 |
+
76.76985599154446,
|
214 |
+
77.92786365437969,
|
215 |
+
74.26137138327388,
|
216 |
+
84.28865107676046,
|
217 |
+
81.26674593737613,
|
218 |
+
77.2697899326199,
|
219 |
+
87.94055753732329
|
220 |
+
]
|
221 |
+
},
|
222 |
+
"overall": {
|
223 |
+
"Count": [
|
224 |
+
2106.0,
|
225 |
+
2106.0,
|
226 |
+
2106.0,
|
227 |
+
2106.0,
|
228 |
+
2106.0,
|
229 |
+
2106.0,
|
230 |
+
2106.0,
|
231 |
+
2106.0,
|
232 |
+
2106.0,
|
233 |
+
2106.0
|
234 |
+
],
|
235 |
+
"LM Score": [
|
236 |
+
90.7760662806655,
|
237 |
+
90.81699619674852,
|
238 |
+
91.26942210869683,
|
239 |
+
90.9236788413761,
|
240 |
+
90.64143535662458,
|
241 |
+
90.79705183787611,
|
242 |
+
90.4988789023748,
|
243 |
+
90.76825309043707,
|
244 |
+
90.78245132067087,
|
245 |
+
90.61643137472653
|
246 |
+
],
|
247 |
+
"SS Score": [
|
248 |
+
60.507399082548005,
|
249 |
+
60.60793731130887,
|
250 |
+
60.919265773138605,
|
251 |
+
61.17547616132696,
|
252 |
+
60.864189702887124,
|
253 |
+
61.32875246954365,
|
254 |
+
60.94670846663401,
|
255 |
+
60.55107696442072,
|
256 |
+
60.6817519455047,
|
257 |
+
61.21396020036013
|
258 |
+
],
|
259 |
+
"ICAT Score": [
|
260 |
+
71.69965916956987,
|
261 |
+
71.54937614761883,
|
262 |
+
71.33752056938417,
|
263 |
+
70.60137073353715,
|
264 |
+
70.94652038349759,
|
265 |
+
70.22470533316367,
|
266 |
+
70.68558202434455,
|
267 |
+
71.61419660477266,
|
268 |
+
71.38813880042564,
|
269 |
+
70.29305027602956
|
270 |
+
]
|
271 |
+
}
|
272 |
+
},
|
273 |
+
"metadata": {
|
274 |
+
"duration": {
|
275 |
+
"duration": [
|
276 |
+
22.09412384033203,
|
277 |
+
22.17448616027832,
|
278 |
+
22.157798767089844,
|
279 |
+
22.124987363815308,
|
280 |
+
22.15943217277527,
|
281 |
+
22.14798879623413,
|
282 |
+
22.171958208084106,
|
283 |
+
22.13748574256897,
|
284 |
+
22.103014707565308,
|
285 |
+
22.132489681243896
|
286 |
+
]
|
287 |
+
},
|
288 |
+
"losses": {
|
289 |
+
"validation_loss": [
|
290 |
+
3.2715015662753064,
|
291 |
+
3.2904326959796575,
|
292 |
+
3.2856660578561865,
|
293 |
+
3.2837974667549132,
|
294 |
+
3.291322820601256,
|
295 |
+
3.2867543881354124,
|
296 |
+
3.288210924811985,
|
297 |
+
3.2873380064964293,
|
298 |
+
3.2725656410922177,
|
299 |
+
3.2834637714468915
|
300 |
+
],
|
301 |
+
"eval_loss": [
|
302 |
+
3.230092532997546,
|
303 |
+
3.237036979198456,
|
304 |
+
3.2270018466140913,
|
305 |
+
3.229680385278619,
|
306 |
+
3.238819530994996,
|
307 |
+
3.2347138847993766,
|
308 |
+
3.2298984014469645,
|
309 |
+
3.2325261141942896,
|
310 |
+
3.2267001732535983,
|
311 |
+
3.226323487447656
|
312 |
+
]
|
313 |
+
}
|
314 |
+
}
|
315 |
+
},
|
316 |
+
"average": {
|
317 |
+
"intrasentence": {
|
318 |
+
"gender": {
|
319 |
+
"Count": 765.0,
|
320 |
+
"LM Score": 91.07993282884587,
|
321 |
+
"SS Score": 66.08153970023535,
|
322 |
+
"ICAT Score": 61.786498178319434
|
323 |
+
},
|
324 |
+
"profession": {
|
325 |
+
"Count": 2430.0,
|
326 |
+
"LM Score": 92.14635547302021,
|
327 |
+
"SS Score": 60.68736586192606,
|
328 |
+
"ICAT Score": 72.45096859632332
|
329 |
+
},
|
330 |
+
"race": {
|
331 |
+
"Count": 2886.0,
|
332 |
+
"LM Score": 89.72226210792456,
|
333 |
+
"SS Score": 60.09772746997212,
|
334 |
+
"ICAT Score": 71.60029076466014
|
335 |
+
},
|
336 |
+
"religion": {
|
337 |
+
"Count": 237.0,
|
338 |
+
"LM Score": 89.04827586206896,
|
339 |
+
"SS Score": 54.845977011494256,
|
340 |
+
"ICAT Score": 80.44937191174526
|
341 |
+
},
|
342 |
+
"overall": {
|
343 |
+
"Count": 2106.0,
|
344 |
+
"LM Score": 90.7890665310197,
|
345 |
+
"SS Score": 60.879651807767274,
|
346 |
+
"ICAT Score": 71.03401200423437
|
347 |
+
}
|
348 |
+
},
|
349 |
+
"metadata": {
|
350 |
+
"duration": {
|
351 |
+
"duration": 22.14037654399872
|
352 |
+
},
|
353 |
+
"losses": {
|
354 |
+
"validation_loss": 3.2841053339450257,
|
355 |
+
"eval_loss": 3.2312793336225596
|
356 |
+
}
|
357 |
+
}
|
358 |
+
},
|
359 |
+
"std": {
|
360 |
+
"intrasentence": {
|
361 |
+
"gender": {
|
362 |
+
"Count": 0.0,
|
363 |
+
"LM Score": 0.31517930086764584,
|
364 |
+
"SS Score": 1.0227535326876758,
|
365 |
+
"ICAT Score": 1.897985827651108
|
366 |
+
},
|
367 |
+
"profession": {
|
368 |
+
"Count": 0.0,
|
369 |
+
"LM Score": 0.28246655476908605,
|
370 |
+
"SS Score": 0.39584720056563627,
|
371 |
+
"ICAT Score": 0.8224403754402673
|
372 |
+
},
|
373 |
+
"race": {
|
374 |
+
"Count": 0.0,
|
375 |
+
"LM Score": 0.33538694332061014,
|
376 |
+
"SS Score": 0.7437622590088747,
|
377 |
+
"ICAT Score": 1.243856825563018
|
378 |
+
},
|
379 |
+
"religion": {
|
380 |
+
"Count": 0.0,
|
381 |
+
"LM Score": 1.3272234166190238,
|
382 |
+
"SS Score": 1.9790900594306704,
|
383 |
+
"ICAT Score": 4.368815360514757
|
384 |
+
},
|
385 |
+
"overall": {
|
386 |
+
"Count": 0.0,
|
387 |
+
"LM Score": 0.19650915346228764,
|
388 |
+
"SS Score": 0.27691102082541075,
|
389 |
+
"ICAT Score": 0.5275473467667906
|
390 |
+
}
|
391 |
+
},
|
392 |
+
"metadata": {
|
393 |
+
"duration": {
|
394 |
+
"duration": 0.025938524014751836
|
395 |
+
},
|
396 |
+
"losses": {
|
397 |
+
"validation_loss": 0.006491632110858275,
|
398 |
+
"eval_loss": 0.004159973363838499
|
399 |
+
}
|
400 |
+
}
|
401 |
+
}
|
402 |
+
}
|
models/unprejudiced_ln_wpe_wte_io.pth
ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5eb46b744944a93fc8f730b00dd979de09dfa73d703b0bea27d7135307e637ed
+ size 515130017
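The .pth entries in this commit are Git LFS pointer files rather than the checkpoints themselves: each stores the LFS spec version, the sha256 of the real object, and its size in bytes. Once the actual weights have been fetched (for example with `git lfs pull`), a download can be checked against its pointer. A minimal sketch, using the oid and size from the pointer above:

```python
import hashlib
import os

def verify_lfs_object(path: str, expected_oid: str, expected_size: int) -> bool:
    """Compare a downloaded file against the oid/size recorded in its LFS pointer."""
    if os.path.getsize(path) != expected_size:
        return False
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected_oid

print(verify_lfs_object(
    "models/unprejudiced_ln_wpe_wte_io.pth",
    "5eb46b744944a93fc8f730b00dd979de09dfa73d703b0bea27d7135307e637ed",
    515130017,
))
```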