This repository has been archived by the owner on Jul 2, 2024. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 43
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
adding adapter weights for mlki (#49)
- Loading branch information
Showing
17 changed files
with
944 additions
and
0 deletions.
There are no files selected for viewing
68 changes: 68 additions & 0 deletions
68
adapters/mlki/bert-base-multilingual-cased_mlki_ep_pfeiffer.yaml
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,68 @@ | ||
# Adapter-Hub adapter entry | ||
# Defines a single adapter entry in Adapter-Hub | ||
# -------------------- | ||
|
||
# The type of adapter (one of the options available in `adapter_type`). | ||
type: text_task | ||
|
||
# The string identifier of the task this adapter belongs to. | ||
task: mlki | ||
|
||
# The string identifier of the subtask this adapter belongs to. | ||
subtask: ep | ||
|
||
# The model type. | ||
# Example: bert | ||
model_type: bert | ||
|
||
# The string identifier of the pre-trained model (by which it is identified at Huggingface). | ||
# Example: bert-base-uncased | ||
model_name: bert-base-multilingual-cased | ||
|
||
# The name of the author(s) of this adapter. | ||
author: Yifan Hou | ||
|
||
# Describes the adapter architecture used by this adapter | ||
config: | ||
# The name of the adapter config used by this adapter (a short name available in the `architectures` folder). | ||
# Example: pfeiffer | ||
using: pfeiffer | ||
non_linearity: relu | ||
reduction_factor: 16 | ||
default_version: '1' | ||
|
||
# A list of different versions of this adapter available for download. | ||
files: | ||
- version: '1' | ||
url: https://huggingface.co/yyyyifan/mlkiadapter/resolve/main/bert-base-multilingual-cased_mlki_ep_pfeiffer.zip | ||
sha1: 8a5e6c12252ff8a671a005975ce6a7c4542cac77 | ||
sha256: 69bc89c6f3596f92ce107377c8d0f9415646a4264f8bfff7b5381a5c160d293d | ||
citation: '@article{hou2022adapters, | ||
title={Adapters for Enhanced Modeling of Multilingual Knowledge and Text}, | ||
author={Hou, Yifan and Jiao, Wenxiang and Liu, Meizhen and Allen, Carl and Tu, Zhaopeng and Sachan, Mrinmaya}, | ||
journal={arXiv preprint arXiv:2210.13617}, | ||
year={2022} | ||
}' | ||
|
||
|
||
# (optional) A short description of this adapter. | ||
description: 'Knowledge adapter set for multilingual knowledge graph integration. This adapter is for cross-lingual entity alignment enhancement (phrase-level). We trained it with alignments from Wikidata across 84 languages.' | ||
|
||
# (optional) A contact email of the author(s). | ||
email: [email protected] | ||
|
||
# (optional) A GitHub handle associated with the author(s). | ||
github: eth-nlped | ||
|
||
# (optional) The name of the model class from which this adapter was extracted. This field is mainly intended for adapters with prediction heads. | ||
# Example: BertModelWithHeads | ||
model_class: BertModel | ||
|
||
# (optional) If the adapter has a pre-trained prediction head included. | ||
prediction_head: false | ||
|
||
# (optional) A Twitter handle associated with the author(s). | ||
twitter: https://twitter.com/yyyyyyyyifan | ||
|
||
# (optional) A URL providing more information on this adapter/ the authors/ the organization. | ||
url: https://yifan-h.github.io/ |
67 changes: 67 additions & 0 deletions
67
adapters/mlki/bert-base-multilingual-cased_mlki_es_pfeiffer.yaml
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,67 @@ | ||
# Adapter-Hub adapter entry | ||
# Defines a single adapter entry in Adapter-Hub | ||
# -------------------- | ||
|
||
# The type of adapter (one of the options available in `adapter_type`). | ||
type: text_task | ||
|
||
# The string identifier of the task this adapter belongs to. | ||
task: mlki | ||
|
||
# The string identifier of the subtask this adapter belongs to. | ||
subtask: es | ||
|
||
# The model type. | ||
# Example: bert | ||
model_type: bert | ||
|
||
# The string identifier of the pre-trained model (by which it is identified at Huggingface). | ||
# Example: bert-base-uncased | ||
model_name: bert-base-multilingual-cased | ||
|
||
# The name of the author(s) of this adapter. | ||
author: Yifan Hou | ||
|
||
# Describes the adapter architecture used by this adapter | ||
config: | ||
# The name of the adapter config used by this adapter (a short name available in the `architectures` folder). | ||
# Example: pfeiffer | ||
using: pfeiffer | ||
non_linearity: relu | ||
reduction_factor: 16 | ||
default_version: '1' | ||
|
||
# A list of different versions of this adapter available for download. | ||
files: | ||
- version: '1' | ||
url: https://huggingface.co/yyyyifan/mlkiadapter/resolve/main/bert-base-multilingual-cased_mlki_es_pfeiffer.zip | ||
sha1: 206f3e56685c8e66f019ae964283360c7e3bfd2f | ||
sha256: d0125694c13c9e04fe740c619153648711183656f491e01f409322f3e134e82e | ||
citation: '@article{hou2022adapters, | ||
title={Adapters for Enhanced Modeling of Multilingual Knowledge and Text}, | ||
author={Hou, Yifan and Jiao, Wenxiang and Liu, Meizhen and Allen, Carl and Tu, Zhaopeng and Sachan, Mrinmaya}, | ||
journal={arXiv preprint arXiv:2210.13617}, | ||
year={2022} | ||
}' | ||
|
||
# (optional) A short description of this adapter. | ||
description: 'Knowledge adapter set for multilingual knowledge graph integration. This adapter is for cross-lingual entity alignment enhancement (sentence-level). We trained it with alignments from Wikipedia across 84 languages.' | ||
|
||
# (optional) A contact email of the author(s). | ||
email: [email protected] | ||
|
||
# (optional) A GitHub handle associated with the author(s). | ||
github: eth-nlped | ||
|
||
# (optional) The name of the model class from which this adapter was extracted. This field is mainly intended for adapters with prediction heads. | ||
# Example: BertModelWithHeads | ||
model_class: BertModel | ||
|
||
# (optional) If the adapter has a pre-trained prediction head included. | ||
prediction_head: false | ||
|
||
# (optional) A Twitter handle associated with the author(s). | ||
twitter: https://twitter.com/yyyyyyyyifan | ||
|
||
# (optional) A URL providing more information on this adapter/ the authors/ the organization. | ||
url: https://yifan-h.github.io/ |
68 changes: 68 additions & 0 deletions
68
adapters/mlki/bert-base-multilingual-cased_mlki_tp_pfeiffer.yaml
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,68 @@ | ||
# Adapter-Hub adapter entry | ||
# Defines a single adapter entry in Adapter-Hub | ||
# -------------------- | ||
|
||
# The type of adapter (one of the options available in `adapter_type`). | ||
type: text_task | ||
|
||
# The string identifier of the task this adapter belongs to. | ||
task: mlki | ||
|
||
# The string identifier of the subtask this adapter belongs to. | ||
subtask: tp | ||
|
||
# The model type. | ||
# Example: bert | ||
model_type: bert | ||
|
||
# The string identifier of the pre-trained model (by which it is identified at Huggingface). | ||
# Example: bert-base-uncased | ||
model_name: bert-base-multilingual-cased | ||
|
||
# The name of the author(s) of this adapter. | ||
author: Yifan Hou | ||
|
||
# Describes the adapter architecture used by this adapter | ||
config: | ||
# The name of the adapter config used by this adapter (a short name available in the `architectures` folder). | ||
# Example: pfeiffer | ||
using: pfeiffer | ||
non_linearity: relu | ||
reduction_factor: 16 | ||
default_version: '1' | ||
|
||
# A list of different versions of this adapter available for download. | ||
files: | ||
- version: '1' | ||
url: https://huggingface.co/yyyyifan/mlkiadapter/resolve/main/bert-base-multilingual-cased_mlki_tp_pfeiffer.zip | ||
sha1: 660778f46daa15746a281b1ac0bfba7185afb62f | ||
sha256: 25f2b9eaa4bece20913bdcde8414263b5dcc3f58b23389b1e1af55cd7ce0a227 | ||
citation: '@article{hou2022adapters, | ||
title={Adapters for Enhanced Modeling of Multilingual Knowledge and Text}, | ||
author={Hou, Yifan and Jiao, Wenxiang and Liu, Meizhen and Allen, Carl and Tu, Zhaopeng and Sachan, Mrinmaya}, | ||
journal={arXiv preprint arXiv:2210.13617}, | ||
year={2022} | ||
}' | ||
|
||
|
||
# (optional) A short description of this adapter. | ||
description: 'Knowledge adapter set for multilingual knowledge graph integration. This adapter is for factual triple enhancement (phrase-level). We trained it with triples from Wikidata across 84 languages.' | ||
|
||
# (optional) A contact email of the author(s). | ||
email: [email protected] | ||
|
||
# (optional) A GitHub handle associated with the author(s). | ||
github: eth-nlped | ||
|
||
# (optional) The name of the model class from which this adapter was extracted. This field is mainly intended for adapters with prediction heads. | ||
# Example: BertModelWithHeads | ||
model_class: BertModel | ||
|
||
# (optional) If the adapter has a pre-trained prediction head included. | ||
prediction_head: false | ||
|
||
# (optional) A Twitter handle associated with the author(s). | ||
twitter: https://twitter.com/yyyyyyyyifan | ||
|
||
# (optional) A URL providing more information on this adapter/ the authors/ the organization. | ||
url: https://yifan-h.github.io/ |
67 changes: 67 additions & 0 deletions
67
adapters/mlki/bert-base-multilingual-cased_mlki_ts_pfeiffer.yaml
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,67 @@ | ||
# Adapter-Hub adapter entry | ||
# Defines a single adapter entry in Adapter-Hub | ||
# -------------------- | ||
|
||
# The type of adapter (one of the options available in `adapter_type`). | ||
type: text_task | ||
|
||
# The string identifier of the task this adapter belongs to. | ||
task: mlki | ||
|
||
# The string identifier of the subtask this adapter belongs to. | ||
subtask: ts | ||
|
||
# The model type. | ||
# Example: bert | ||
model_type: bert | ||
|
||
# The string identifier of the pre-trained model (by which it is identified at Huggingface). | ||
# Example: bert-base-uncased | ||
model_name: bert-base-multilingual-cased | ||
|
||
# The name of the author(s) of this adapter. | ||
author: Yifan Hou | ||
|
||
# Describes the adapter architecture used by this adapter | ||
config: | ||
# The name of the adapter config used by this adapter (a short name available in the `architectures` folder). | ||
# Example: pfeiffer | ||
using: pfeiffer | ||
non_linearity: relu | ||
reduction_factor: 16 | ||
default_version: '1' | ||
|
||
# A list of different versions of this adapter available for download. | ||
files: | ||
- version: '1' | ||
url: https://huggingface.co/yyyyifan/mlkiadapter/resolve/main/bert-base-multilingual-cased_mlki_ts_pfeiffer.zip | ||
sha1: d59f317fe26c8fe7c20ef21ae0da4287c35cf170 | ||
sha256: ed5526cfb184545cdc39b9e68db4e4e05dbc9a5e4cfc4a8262eebdbc2ea0244c | ||
citation: '@article{hou2022adapters, | ||
title={Adapters for Enhanced Modeling of Multilingual Knowledge and Text}, | ||
author={Hou, Yifan and Jiao, Wenxiang and Liu, Meizhen and Allen, Carl and Tu, Zhaopeng and Sachan, Mrinmaya}, | ||
journal={arXiv preprint arXiv:2210.13617}, | ||
year={2022} | ||
}' | ||
|
||
# (optional) A short description of this adapter. | ||
description: 'Knowledge adapter set for multilingual knowledge graph integration. This adapter is for factual triple enhancement (sentence-level). We trained it with triples from T-REx across 84 languages.' | ||
|
||
# (optional) A contact email of the author(s). | ||
email: [email protected] | ||
|
||
# (optional) A GitHub handle associated with the author(s). | ||
github: eth-nlped | ||
|
||
# (optional) The name of the model class from which this adapter was extracted. This field is mainly intended for adapters with prediction heads. | ||
# Example: BertModelWithHeads | ||
model_class: BertModel | ||
|
||
# (optional) If the adapter has a pre-trained prediction head included. | ||
prediction_head: false | ||
|
||
# (optional) A Twitter handle associated with the author(s). | ||
twitter: https://twitter.com/yyyyyyyyifan | ||
|
||
# (optional) A URL providing more information on this adapter/ the authors/ the organization. | ||
url: https://yifan-h.github.io/ |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,67 @@ | ||
# Adapter-Hub adapter entry | ||
# Defines a single adapter entry in Adapter-Hub | ||
# -------------------- | ||
|
||
# The type of adapter (one of the options available in `adapter_type`). | ||
type: text_task | ||
|
||
# The string identifier of the task this adapter belongs to. | ||
task: mlki | ||
|
||
# The string identifier of the subtask this adapter belongs to. | ||
subtask: ep | ||
|
||
# The model type. | ||
# Example: bert | ||
model_type: xlm-roberta | ||
|
||
# The string identifier of the pre-trained model (by which it is identified at Huggingface). | ||
# Example: bert-base-uncased | ||
model_name: xlm-roberta-base | ||
|
||
# The name of the author(s) of this adapter. | ||
author: Yifan Hou | ||
|
||
# Describes the adapter architecture used by this adapter | ||
config: | ||
# The name of the adapter config used by this adapter (a short name available in the `architectures` folder). | ||
# Example: pfeiffer | ||
using: pfeiffer | ||
non_linearity: relu | ||
reduction_factor: 16 | ||
default_version: '1' | ||
|
||
# A list of different versions of this adapter available for download. | ||
files: | ||
- version: '1' | ||
url: https://huggingface.co/yyyyifan/mlkiadapter/resolve/main/xlm-roberta-base_mlki_ep_pfeiffer.zip | ||
sha1: 6407e5c0723f63b9982f8d6e7592f2aa7c9e788b | ||
sha256: 948628b6268fb4e656ba9f302baa919bff105e0182d6acc9e820cc18456cb4ec | ||
citation: '@article{hou2022adapters, | ||
title={Adapters for Enhanced Modeling of Multilingual Knowledge and Text}, | ||
author={Hou, Yifan and Jiao, Wenxiang and Liu, Meizhen and Allen, Carl and Tu, Zhaopeng and Sachan, Mrinmaya}, | ||
journal={arXiv preprint arXiv:2210.13617}, | ||
year={2022} | ||
}' | ||
|
||
# (optional) A short description of this adapter. | ||
description: 'Knowledge adapter set for multilingual knowledge graph integration. This adapter is for cross-lingual entity alignment enhancement (phrase-level). We trained it with alignments from Wikidata across 84 languages.' | ||
|
||
# (optional) A contact email of the author(s). | ||
email: [email protected] | ||
|
||
# (optional) A GitHub handle associated with the author(s). | ||
github: eth-nlped | ||
|
||
# (optional) The name of the model class from which this adapter was extracted. This field is mainly intended for adapters with prediction heads. | ||
# Example: BertModelWithHeads | ||
model_class: XLMRobertaModel | ||
|
||
# (optional) If the adapter has a pre-trained prediction head included. | ||
prediction_head: false | ||
|
||
# (optional) A Twitter handle associated with the author(s). | ||
twitter: https://twitter.com/yyyyyyyyifan | ||
|
||
# (optional) A URL providing more information on this adapter/ the authors/ the organization. | ||
url: https://yifan-h.github.io/ |
Oops, something went wrong.