calpt committed
Commit f3c0b9c
Parent: 2f6c9aa

Upload model

Files changed (2):
  1. README.md +7 -5
  2. adapter_config.json +1 -1
README.md CHANGED
@@ -1,16 +1,16 @@
 ---
 tags:
 - adapterhub:af/cc100
-- adapters
 - xmod
+- adapters
 language:
 - af
 license: "mit"
 ---
 
-# Adapter `AdapterHub/xmod-base-af_ZA` for xmod_output
+# Adapter `AdapterHub/xmod-base-af_ZA` for AdapterHub/xmod-base
 
-An [adapter](https://adapterhub.ml) for the `xmod_output` model that was trained on the [af/cc100](https://adapterhub.ml/explore/af/cc100/) dataset.
+An [adapter](https://adapterhub.ml) for the `AdapterHub/xmod-base` model that was trained on the [af/cc100](https://adapterhub.ml/explore/af/cc100/) dataset.
 
 This adapter was created for usage with the **[Adapters](https://github.com/Adapter-Hub/adapters)** library.
 
@@ -27,7 +27,7 @@ Now, the adapter can be loaded and activated like this:
 ```python
 from adapters import AutoAdapterModel
 
-model = AutoAdapterModel.from_pretrained("xmod_output")
+model = AutoAdapterModel.from_pretrained("AdapterHub/xmod-base")
 adapter_name = model.load_adapter("AdapterHub/xmod-base-af_ZA", source="hf", set_active=True)
 ```
 
@@ -41,6 +41,7 @@ adapter_name = model.load_adapter("AdapterHub/xmod-base-af_ZA", source="hf", set_active=True)
 
 ## Citation
 
+```
 @inproceedings{pfeiffer-etal-2022-lifting,
     title = "Lifting the Curse of Multilinguality by Pre-training Modular Transformers",
     author = "Pfeiffer, Jonas and
@@ -58,4 +59,5 @@ adapter_name = model.load_adapter("AdapterHub/xmod-base-af_ZA", source="hf", set_active=True)
     url = "https://aclanthology.org/2022.naacl-main.255",
     doi = "10.18653/v1/2022.naacl-main.255",
     pages = "3479--3495"
-}
+}
+```
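
With this fix, the README snippet points at the public `AdapterHub/xmod-base` checkpoint instead of the author's local `xmod_output` directory. A minimal sketch of how the loaded adapter might be exercised end to end; the tokenizer repo and the Afrikaans example sentence are assumptions, not part of this commit:

```python
from transformers import AutoTokenizer
from adapters import AutoAdapterModel

# Load the base checkpoint and attach the Afrikaans adapter, as in the updated README
model = AutoAdapterModel.from_pretrained("AdapterHub/xmod-base")
adapter_name = model.load_adapter("AdapterHub/xmod-base-af_ZA", source="hf", set_active=True)

# Assumption: the base repo ships tokenizer files (X-MOD uses an XLM-R-style vocabulary)
tokenizer = AutoTokenizer.from_pretrained("AdapterHub/xmod-base")

inputs = tokenizer("Dit is 'n toets.", return_tensors="pt")  # "This is a test." in Afrikaans
outputs = model(**inputs)
print(adapter_name)      # adapter name as recorded in adapter_config.json ("af_ZA")
print(outputs[0].shape)  # (1, seq_len, 768) hidden states, matching the config below
```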
adapter_config.json CHANGED
@@ -33,7 +33,7 @@
   },
   "hidden_size": 768,
   "model_class": "XmodAdapterModel",
-  "model_name": "xmod_output",
+  "model_name": "AdapterHub/xmod-base",
   "model_type": "xmod",
   "name": "af_ZA",
   "version": "0.0.0"