config.json (commit e22172f: "Upload config.json with huggingface_hub")
{
    "alignable_interventions": [
        null
    ],
    "alignable_interventions_type": [
        "<class 'models.interventions.LowRankRotatedSpaceIntervention'>",
        "<class 'models.interventions.LowRankRotatedSpaceIntervention'>"
    ],
    "alignable_model_type": "<class 'transformers.models.gpt2.modeling_gpt2.GPT2Model'>",
    "alignable_representations": [
        [
            0,
            "block_output",
            "pos",
            1,
            128,
            null,
            0,
            null
        ],
        [
            2,
            "block_output",
            "pos",
            1,
            128,
            null,
            0,
            null
        ]
    ],
    "intervention_dimensions": [
        null,
        null
    ],
    "mode": "parallel",
    "sorted_keys": [
        "layer.0.repr.block_output.unit.pos.nunit.1#0",
        "layer.2.repr.block_output.unit.pos.nunit.1#0"
    ],
    "transformers_version": "4.34.0"
}
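
Taken together, the fields above describe two LowRankRotatedSpaceIntervention hooks attached to a GPT2Model: one on the block output of layer 0 and one on layer 2, each operating on a single position unit in parallel mode. The sketch below, which assumes the file is saved locally as config.json, parses it with the Python standard library and reconstructs the sorted_keys entries from the alignable_representations tuples; it is not the uploading library's own loading code, and the field-order reading and the meaning of the trailing "#0" are assumptions inferred from the key format.

# A minimal sketch, standard library only, of how the config's fields fit together.
import json

with open("config.json") as f:  # path assumed; adjust to wherever the file lives
    cfg = json.load(f)

print(cfg["alignable_model_type"])  # the wrapped model class: GPT2Model
print(cfg["mode"])                  # "parallel"

for i, rep in enumerate(cfg["alignable_representations"]):
    # First four fields appear to be: layer index, component ("block_output"),
    # unit type ("pos"), and number of units; the remaining fields (e.g. 128)
    # are left opaque here.
    layer, component, unit, n_units = rep[:4]
    # The "#0" suffix is assumed to be a per-location counter.
    key = f"layer.{layer}.repr.{component}.unit.{unit}.nunit.{n_units}#0"
    assert key == cfg["sorted_keys"][i]
    print(key, "->", cfg["alignable_interventions_type"][i])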